From 459831779b7a845c47f79e0697e686ddab01f027 Mon Sep 17 00:00:00 2001 From: Rob Dominguez Date: Fri, 25 Oct 2024 10:38:17 -0500 Subject: [PATCH] Docs: Refactor Getting Started (#731) Co-authored-by: Sean Park-Ross <94021366+seanparkross@users.noreply.github.com> --- docs/_install-the-cli.mdx | 2 - docs/_prereqs.mdx | 25 ++ .../explore/_category_.json | 0 .../explore/explore-your-api.mdx | 6 +- .../explore/index.mdx | 4 +- .../explore/interact.mdx | 6 +- .../explore/monitor.mdx | 8 +- docs/business-logic/introduction.mdx | 10 +- docs/business-logic/typescript.mdx | 4 +- docs/collaboration/_category_.json | 8 + .../best-practices.mdx | 0 .../collaborate => collaboration}/comment.mdx | 20 +- .../federation.mdx | 12 +- .../collaborate => collaboration}/index.mdx | 4 +- .../collaborate => collaboration}/invite.mdx | 2 +- docs/collaboration/overview.mdx | 53 +++ docs/connectors/clickhouse/index.mdx | 2 +- docs/connectors/external-apis/graphql.mdx | 2 +- docs/connectors/external-apis/open-api.mdx | 2 +- docs/connectors/mongodb/index.mdx | 2 +- docs/connectors/postgresql/index.mdx | 2 +- docs/connectors/postgresql/local-postgres.mdx | 4 +- .../native-operations/custom-mutations.mdx | 2 +- .../native-operations/custom-queries.mdx | 2 +- .../native-operations/vector-search.mdx | 5 +- .../deployment/01-create-a-project.mdx | 4 +- .../deployment/02-create-a-subgraph.mdx | 6 +- .../deployment/03-deploy-a-connector.mdx | 0 .../deployment/04-deploy-your-supergraph.mdx | 10 +- docs/deployment/deployment/_category_.json | 4 + .../_clickHouse/_03-deploy-a-connector.mdx | 16 +- .../_go/_03-deploy-a-connector.mdx | 24 +- .../_graphql/_03-deploy-a-connector.mdx | 16 +- .../_mongoDB/_03-deploy-a-connector.mdx | 16 +- .../_openAPI/_03-deploy-a-connector.mdx | 16 +- .../_postgreSQL/_03-deploy-a-connector.mdx | 16 +- .../_python/_03-deploy-a-connector.mdx | 16 +- .../_typeScript/_03-deploy-a-connector.mdx | 16 +- .../deployment/index.mdx | 4 +- docs/deployment/private/_category_.json | 2 +- .../private/data-plane-collaboration.mdx | 36 +- .../private/self-hosted-deployment.mdx | 6 +- docs/deployment/serverless.mdx | 3 +- docs/federation/_category_.json | 2 +- docs/federation/architecture.mdx | 19 +- docs/federation/basics.mdx | 12 +- .../independent-subgraph-development.mdx | 20 +- docs/federation/overview.mdx | 2 +- .../build/00-prerequisites.mdx | 78 ---- .../build/01-init-supergraph.mdx | 156 -------- .../build/02-init-subgraph.mdx | 162 --------- .../01-connect-a-source.mdx | 17 - .../02-create-source-metadata.mdx | 104 ------ .../build/03-connect-to-data/_category_.json | 4 - .../_clickHouse/_01-connect-a-source.mdx | 168 --------- .../_graphql/_01-connect-a-source.mdx | 254 ------------- .../_mongoDB/_01-connect-a-source.mdx | 190 ---------- .../_openAPI/_01-connect-a-source.mdx | 202 ----------- .../_postgreSQL/_01-connect-a-source.mdx | 188 ---------- .../build/03-connect-to-data/index.mdx | 35 -- .../build/04-build-your-api.mdx | 126 ------- .../build/05-add-permissions.mdx | 337 ------------------ .../build/06-add-business-logic.mdx | 17 - .../build/07-create-a-relationship.mdx | 167 --------- docs/getting-started/build/08-mutate-data.mdx | 23 -- docs/getting-started/build/_category_.json | 4 - .../_clickHouse/_08-mutate-data.mdx | 180 ---------- .../_go/_06-add-business-logic.mdx | 242 ------------- .../_graphql/_08-mutate-data.mdx | 148 -------- .../_mongoDB/_08-mutate-data.mdx | 161 --------- .../_openAPI/_08-mutate-data.mdx | 150 -------- .../_postgreSQL/_08-mutate-data.mdx | 316 ---------------- 
.../_python/_06-add-business-logic.mdx | 249 ------------- .../_typescript/_06-add-business-logic.mdx | 245 ------------- docs/getting-started/build/index.mdx | 36 -- .../collaborate/_category_.json | 4 - .../deployment/_category_.json | 4 - docs/getting-started/overview.mdx | 34 +- docs/getting-started/quickstart.mdx | 42 +-- docs/getting-started/with-clickhouse.mdx | 170 +++++++++ docs/getting-started/with-mongodb.mdx | 156 ++++++++ docs/getting-started/with-others.mdx | 21 ++ docs/getting-started/with-postgresql.mdx | 167 +++++++++ docs/graphql-api/graphql-schema-diff.mdx | 2 +- docs/project-configuration/builds.mdx | 12 +- docs/support/faq.mdx | 2 +- .../feature-availability/api-features.mdx | 7 +- docs/upgrade/feature-availability/tooling.mdx | 4 +- docs/upgrade/index.mdx | 9 +- src/components/CodeStep/index.tsx | 49 ++- src/components/CodeStep/styles.css | 34 +- .../OverviewIconCard/styles.module.css | 6 +- src/components/databaseDocs/contentLoader.tsx | 72 +--- src/theme/DocSidebarItem/utils.js | 9 +- 94 files changed, 914 insertions(+), 4302 deletions(-) create mode 100644 docs/_prereqs.mdx rename docs/{getting-started => basics}/explore/_category_.json (100%) rename docs/{getting-started => basics}/explore/explore-your-api.mdx (96%) rename docs/{getting-started => basics}/explore/index.mdx (97%) rename docs/{getting-started => basics}/explore/interact.mdx (94%) rename docs/{getting-started => basics}/explore/monitor.mdx (87%) create mode 100644 docs/collaboration/_category_.json rename docs/{getting-started/collaborate => collaboration}/best-practices.mdx (100%) rename docs/{getting-started/collaborate => collaboration}/comment.mdx (84%) rename docs/{getting-started/collaborate => collaboration}/federation.mdx (94%) rename docs/{getting-started/collaborate => collaboration}/index.mdx (77%) rename docs/{getting-started/collaborate => collaboration}/invite.mdx (98%) create mode 100644 docs/collaboration/overview.mdx rename docs/{getting-started => deployment}/deployment/01-create-a-project.mdx (97%) rename docs/{getting-started => deployment}/deployment/02-create-a-subgraph.mdx (84%) rename docs/{getting-started => deployment}/deployment/03-deploy-a-connector.mdx (100%) rename docs/{getting-started => deployment}/deployment/04-deploy-your-supergraph.mdx (89%) create mode 100644 docs/deployment/deployment/_category_.json rename docs/{getting-started => deployment}/deployment/_databaseDocs/_clickHouse/_03-deploy-a-connector.mdx (82%) rename docs/{getting-started => deployment}/deployment/_databaseDocs/_go/_03-deploy-a-connector.mdx (70%) rename docs/{getting-started => deployment}/deployment/_databaseDocs/_graphql/_03-deploy-a-connector.mdx (75%) rename docs/{getting-started => deployment}/deployment/_databaseDocs/_mongoDB/_03-deploy-a-connector.mdx (82%) rename docs/{getting-started => deployment}/deployment/_databaseDocs/_openAPI/_03-deploy-a-connector.mdx (82%) rename docs/{getting-started => deployment}/deployment/_databaseDocs/_postgreSQL/_03-deploy-a-connector.mdx (79%) rename docs/{getting-started => deployment}/deployment/_databaseDocs/_python/_03-deploy-a-connector.mdx (75%) rename docs/{getting-started => deployment}/deployment/_databaseDocs/_typeScript/_03-deploy-a-connector.mdx (76%) rename docs/{getting-started => deployment}/deployment/index.mdx (91%) delete mode 100644 docs/getting-started/build/00-prerequisites.mdx delete mode 100644 docs/getting-started/build/01-init-supergraph.mdx delete mode 100644 docs/getting-started/build/02-init-subgraph.mdx delete mode 100644 
docs/getting-started/build/03-connect-to-data/01-connect-a-source.mdx delete mode 100644 docs/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx delete mode 100644 docs/getting-started/build/03-connect-to-data/_category_.json delete mode 100644 docs/getting-started/build/03-connect-to-data/_databaseDocs/_clickHouse/_01-connect-a-source.mdx delete mode 100644 docs/getting-started/build/03-connect-to-data/_databaseDocs/_graphql/_01-connect-a-source.mdx delete mode 100644 docs/getting-started/build/03-connect-to-data/_databaseDocs/_mongoDB/_01-connect-a-source.mdx delete mode 100644 docs/getting-started/build/03-connect-to-data/_databaseDocs/_openAPI/_01-connect-a-source.mdx delete mode 100644 docs/getting-started/build/03-connect-to-data/_databaseDocs/_postgreSQL/_01-connect-a-source.mdx delete mode 100644 docs/getting-started/build/03-connect-to-data/index.mdx delete mode 100644 docs/getting-started/build/04-build-your-api.mdx delete mode 100644 docs/getting-started/build/05-add-permissions.mdx delete mode 100644 docs/getting-started/build/06-add-business-logic.mdx delete mode 100644 docs/getting-started/build/07-create-a-relationship.mdx delete mode 100644 docs/getting-started/build/08-mutate-data.mdx delete mode 100644 docs/getting-started/build/_category_.json delete mode 100644 docs/getting-started/build/_databaseDocs/_clickHouse/_08-mutate-data.mdx delete mode 100644 docs/getting-started/build/_databaseDocs/_go/_06-add-business-logic.mdx delete mode 100644 docs/getting-started/build/_databaseDocs/_graphql/_08-mutate-data.mdx delete mode 100644 docs/getting-started/build/_databaseDocs/_mongoDB/_08-mutate-data.mdx delete mode 100644 docs/getting-started/build/_databaseDocs/_openAPI/_08-mutate-data.mdx delete mode 100644 docs/getting-started/build/_databaseDocs/_postgreSQL/_08-mutate-data.mdx delete mode 100644 docs/getting-started/build/_databaseDocs/_python/_06-add-business-logic.mdx delete mode 100644 docs/getting-started/build/_databaseDocs/_typescript/_06-add-business-logic.mdx delete mode 100644 docs/getting-started/build/index.mdx delete mode 100644 docs/getting-started/collaborate/_category_.json delete mode 100644 docs/getting-started/deployment/_category_.json create mode 100644 docs/getting-started/with-clickhouse.mdx create mode 100644 docs/getting-started/with-mongodb.mdx create mode 100644 docs/getting-started/with-others.mdx create mode 100644 docs/getting-started/with-postgresql.mdx diff --git a/docs/_install-the-cli.mdx b/docs/_install-the-cli.mdx index f343a22cc..42b904ea6 100644 --- a/docs/_install-the-cli.mdx +++ b/docs/_install-the-cli.mdx @@ -4,8 +4,6 @@ import CodeBlock from "@theme/CodeBlock"; import Version from "@site/src/components/CliVersion"; import Admonition from "@theme/Admonition"; - - diff --git a/docs/_prereqs.mdx b/docs/_prereqs.mdx new file mode 100644 index 000000000..b1091f040 --- /dev/null +++ b/docs/_prereqs.mdx @@ -0,0 +1,25 @@ +import Tabs from "@theme/Tabs"; +import TabItem from "@theme/TabItem"; +import CodeBlock from "@theme/CodeBlock"; +import Version from "@site/src/components/CliVersion"; +import Admonition from "@theme/Admonition"; +import InstallTheCli from "@site/docs/_install-the-cli.mdx"; + +## Prerequisites + +**Install the DDN CLI** + + + +**Install [Docker](https://docs.docker.com/engine/install/)** + +The Docker-based workflow helps you iterate and develop locally without deploying any changes to Hasura DDN, making the +development experience faster and your feedback loops shorter.
**You'll need Docker Compose `v2.27.1` or later.** + +**Validate the installation** + +You can verify that the DDN CLI is installed correctly by running: + +```sh +ddn doctor +``` diff --git a/docs/getting-started/explore/_category_.json b/docs/basics/explore/_category_.json similarity index 100% rename from docs/getting-started/explore/_category_.json rename to docs/basics/explore/_category_.json diff --git a/docs/getting-started/explore/explore-your-api.mdx b/docs/basics/explore/explore-your-api.mdx similarity index 96% rename from docs/getting-started/explore/explore-your-api.mdx rename to docs/basics/explore/explore-your-api.mdx index f6aad923d..49f0cd814 100644 --- a/docs/getting-started/explore/explore-your-api.mdx +++ b/docs/basics/explore/explore-your-api.mdx @@ -35,8 +35,8 @@ The console's β€” Hasura's web-based GUI β€” explorer page offers: - **Enhanced Collaboration:** Foster better communication and coordination across teams with a shared, intuitive visual tool. -You can also use the console to [interact with](/getting-started/explore/interact.mdx) and -[monitor](/getting-started/explore/monitor.mdx) your API. +You can also use the console to [interact with](/basics/explore/interact.mdx) and [monitor](/basics/explore/monitor.mdx) +your API. ## Step 1. Explore your supergraph @@ -194,4 +194,4 @@ activity without making changes. ## Next steps Now that you have the knowledge to explore your API, let's see how easy it is to -[interact with and test your API](/getting-started/explore/interact.mdx) using the GraphiQL explorer. +[interact with and test your API](/basics/explore/interact.mdx) using the GraphiQL explorer. diff --git a/docs/getting-started/explore/index.mdx b/docs/basics/explore/index.mdx similarity index 97% rename from docs/getting-started/explore/index.mdx rename to docs/basics/explore/index.mdx index 39b5773db..c920a1be7 100644 --- a/docs/getting-started/explore/index.mdx +++ b/docs/basics/explore/index.mdx @@ -66,5 +66,5 @@ want to easily see what Hasura can do, you can start exploring a finished superg To explore a sample eCommerce supergraph API built on Hasura DDN ([source code](https://github.com/hasura/ddn-sample-app)), simply head to the [DDN console](https://console.hasura.io/projects). You should see an "eCommerce App" sample project under **Sample -Projects**. Then, to get started with the console, head [here](/getting-started/explore/explore-your-api.mdx) to learn -how to start using the explorer. +Projects**. Then, to get started with the console, head [here](/basics/explore/explore-your-api.mdx) to learn how to +start using the explorer. diff --git a/docs/getting-started/explore/interact.mdx b/docs/basics/explore/interact.mdx similarity index 94% rename from docs/getting-started/explore/interact.mdx rename to docs/basics/explore/interact.mdx index 6e3f9dc2b..7729c28d6 100644 --- a/docs/getting-started/explore/interact.mdx +++ b/docs/basics/explore/interact.mdx @@ -34,8 +34,8 @@ With Hasura DDN, you can use our GraphiQL explorer to create and test queries di - **Built-in Tracing:** Instantly trace queries to identify performance bottlenecks and optimize efficiently. - **Streamlined Workflow:** Save time and reduce context-switching by having everything you need in a single interface. -You can also use the console to [visualize](/getting-started/explore/explore-your-api.mdx) and -[monitor](/getting-started/explore/monitor.mdx) your API. 
+You can also use the console to [visualize](/basics/explore/explore-your-api.mdx) and +[monitor](/basics/explore/monitor.mdx) your API. ## Step 1. Test your supergraph API @@ -107,4 +107,4 @@ The API expects `content-type` to be present. Removing it will return an error. ## Next steps Now that you've tested your API, viewed traces, and explored the schema, learn how to -[monitor your API's performance](/getting-started/explore/monitor.mdx). +[monitor your API's performance](/basics/explore/monitor.mdx). diff --git a/docs/getting-started/explore/monitor.mdx b/docs/basics/explore/monitor.mdx similarity index 87% rename from docs/getting-started/explore/monitor.mdx rename to docs/basics/explore/monitor.mdx index 4f8f23861..ce2f25d6d 100644 --- a/docs/getting-started/explore/monitor.mdx +++ b/docs/basics/explore/monitor.mdx @@ -41,8 +41,8 @@ Aside from the information below, you can dig into our observability options [he ::: -You can also use the console to [visualize](/getting-started/explore/explore-your-api.mdx) and -[interact with](/getting-started/explore/interact.mdx) your API. +You can also use the console to [visualize](/basics/explore/explore-your-api.mdx) and +[interact with](/basics/explore/interact.mdx) your API. From the sidenav of the console β€” Hasura's web-based GUI β€” you can access the `Insights` page. @@ -56,11 +56,11 @@ You can access key performance metrics for both your project and for each build: You can also click the `Traces` tab at the top of the page and get query-by-query trace data for each request, seeing all the same details as when using the -[`View Trace` button in the GraphiQL explorer](/getting-started/explore/interact.mdx#traces). +[`View Trace` button in the GraphiQL explorer](/basics/explore/interact.mdx#traces). ## Next steps With a complete picture of everything the console has to offer, aren't you ready to build your first supergraph?! Head -to the [Build an API section](/getting-started/build/prerequisites/) to get started πŸŽ‰ +to the [Build an API section](/getting-started/quickstart.mdx) to get started πŸŽ‰ diff --git a/docs/business-logic/introduction.mdx b/docs/business-logic/introduction.mdx index 93d1a3669..89de82ebb 100644 --- a/docs/business-logic/introduction.mdx +++ b/docs/business-logic/introduction.mdx @@ -83,10 +83,10 @@ To get started with Hasura's lambda connectors and enjoy the benefits of: Explore the following resources: -- [Using the TypeScript connector](/getting-started/build/06-add-business-logic.mdx?db=TypeScript). -- [Using the Python connector](/getting-started/build/06-add-business-logic.mdx?db=Python). -- [Using the Go connector](/getting-started/build/06-add-business-logic.mdx?db=Go). +- [Using the TypeScript connector](/getting-started/quickstart.mdx). +- [Using the Python connector](/getting-started/quickstart.mdx). +- [Using the Go connector](/getting-started/quickstart.mdx). - Learn how to integrate existing REST services using the - [OpenAPI connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx?db=OpenAPI). + [OpenAPI connector](/getting-started/quickstart.mdx). - Learn how to integrate existing GraphQL services using the - [GraphQL connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx?db=GraphQL). + [GraphQL connector](/getting-started/quickstart.mdx). 
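For a concrete feel for the lambda-connector workflow the resources above describe, a minimal CLI sketch follows. The subcommands come from the DDN CLI, but `my_ts` is a placeholder connector name and flags can vary by CLI version, so verify each step with `ddn --help`:

```sh
# Minimal sketch: scaffold a TypeScript lambda connector, introspect its
# exported functions, and expose them as commands in the supergraph.
# "my_ts" is a placeholder name; confirm flags with `ddn --help`.
ddn connector init my_ts -i        # choose the hasura/nodejs connector when prompted
ddn connector introspect my_ts     # read functions.ts into connector metadata
ddn command add my_ts "*"          # expose the exported functions via the API
```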
diff --git a/docs/business-logic/typescript.mdx b/docs/business-logic/typescript.mdx index 39f506e52..63f2eafb6 100644 --- a/docs/business-logic/typescript.mdx +++ b/docs/business-logic/typescript.mdx @@ -322,7 +322,7 @@ GraphQL API. :::info More examples To see more examples, check out our getting started guide for -[adding custom business logic](/getting-started/build/06-add-business-logic.mdx) or database-specific guides for -[creating mutations](/getting-started/build/08-mutate-data.mdx). +[adding custom business logic](/getting-started/quickstart.mdx) or database-specific guides for +[creating mutations](/getting-started/quickstart.mdx). ::: diff --git a/docs/collaboration/_category_.json b/docs/collaboration/_category_.json new file mode 100644 index 000000000..d392452e2 --- /dev/null +++ b/docs/collaboration/_category_.json @@ -0,0 +1,8 @@ +{ + "label": "Collaboration", + "position": 6.5, + "className": "collaboration-icon", + "customProps": { + "sidebar_pathname": "collaboration" + } +} diff --git a/docs/getting-started/collaborate/best-practices.mdx b/docs/collaboration/best-practices.mdx similarity index 100% rename from docs/getting-started/collaborate/best-practices.mdx rename to docs/collaboration/best-practices.mdx diff --git a/docs/getting-started/collaborate/comment.mdx b/docs/collaboration/comment.mdx similarity index 84% rename from docs/getting-started/collaborate/comment.mdx rename to docs/collaboration/comment.mdx index 5f786b516..cd9dcd215 100644 --- a/docs/getting-started/collaborate/comment.mdx +++ b/docs/collaboration/comment.mdx @@ -43,16 +43,16 @@ You can add comments on various objects from your metadata. You can add comments to the following areas: -### Explorer Tab +### Explorer Tab #### Supergraph Page - +
#### Subgraph Page - +
#### Models --> General @@ -104,7 +104,6 @@ You can add comments to the following areas: - ## Notifications To ensure effective collaboration, you can tag subgraph collaborators in your comments: @@ -112,22 +111,23 @@ To ensure effective collaboration, you can tag subgraph collaborators in your co 1. Type `@` in your comment. 2. Select the collaborator you want to notify. -Tagged collaborators will receive an email with your comments and a notification on the notification hub on the console, +Tagged collaborators will receive an email with your comments and a notification on the notification hub on the console, keeping them informed of any discussions or questions. -The notification hub can be found in the top right corner of the console. On clicking the comments button, you will see all the comments -where you are tagged in one place. The messages will be grouped based on the underlying commenting thread. You can click on a particular comment (deep linking) and go to the original thread on the console. -You can also delete notifications from that menu. +The notification hub can be found in the top right corner of the console. On clicking the comments button, you will see +all the comments where you are tagged in one place. The messages will be grouped based on the underlying commenting +thread. You can click on a particular comment (deep linking) and go to the original thread on the console. You can also +delete notifications from that menu. {/* */} ![Alt text](/img/get-started/comments_notification.png) +

:::info Invite collaborators -You can learn how to invite collaborators [here](/getting-started/collaborate/invite.mdx). - +You can learn how to invite collaborators [here](/collaboration/invite.mdx). ::: diff --git a/docs/getting-started/collaborate/federation.mdx b/docs/collaboration/federation.mdx similarity index 94% rename from docs/getting-started/collaborate/federation.mdx rename to docs/collaboration/federation.mdx index 8e083954f..9bd8b1ae9 100644 --- a/docs/getting-started/collaborate/federation.mdx +++ b/docs/collaboration/federation.mdx @@ -29,8 +29,8 @@ In order to add subgraph collaborators, your project must be a [DDN Advanced pro Hasura DDN provides a concept of [subgraph](/project-configuration/subgraphs/#introduction) which allows you to manage your Hasura metadata more efficiently by allowing it to be split based on ownership. Hasura DDN allows multiple users -and teams to [work together as collaborators](/getting-started/collaborate/invite.mdx) on subgraphs by assigning each -user specific roles and permissions. +and teams to [work together as collaborators](/collaboration/invite.mdx) on subgraphs by assigning each user +specific roles and permissions. In this guide, you'll learn how to onboard multiple teams into a collaborative project using a supergraph. This approach allows for efficient collaboration making it easier to manage and scale your project as more teams are added. @@ -49,8 +49,8 @@ The Fulfillment team is already using Hasura DDN and has created a supergraph pr [models](/supergraph-modeling/models.mdx) enabled. When the UX Team joins the project, you'll use the [CLI](/cli/installation.mdx) to create a new subgraph for them, named -`ux` and [invite developers](/getting-started/collaborate/invite.mdx) from the team to collaborate on this subgraph via -the console. +`ux` and [invite developers](/collaboration/invite.mdx) from the team to collaborate on this subgraph via the +console. The UX team will start by initializing a new local DDN project with their own new version control repository and set the project context to match the existing project set up by the Fulfillment team. @@ -76,14 +76,14 @@ ddn project subgraph create ### Step 2. Invite collaborators -[Follow the steps ](/getting-started/collaborate/invite.mdx#invite-collaborators) to invite subgraph collaborators. Take +[Follow the steps ](/collaboration/invite.mdx#invite-collaborators) to invite subgraph collaborators. Take care to select the appropriate role β€” either **Admin** or **Developer** β€” for each subgraph team member. ## As a subgraph developer, collaborate on the new subgraph ### Step 1. Accept the invite -[Reference these docs](/getting-started/collaborate/invite.mdx#accept-invite) to accept an invitation and explore the +[Reference these docs](/collaboration/invite.mdx#accept-invite) to accept an invitation and explore the existing supergraph. ### Step 2. Create a new local project, supergraph and subgraph diff --git a/docs/getting-started/collaborate/index.mdx b/docs/collaboration/index.mdx similarity index 77% rename from docs/getting-started/collaborate/index.mdx rename to docs/collaboration/index.mdx index b90bb5c16..fff5a1f76 100644 --- a/docs/getting-started/collaborate/index.mdx +++ b/docs/collaboration/index.mdx @@ -22,5 +22,5 @@ service. 
## Next steps -- [Learn how to invite others](getting-started/collaborate/invite.mdx) to a project -- Read about the [best practices](getting-started/collaborate/best-practices.mdx) for developing as a team +- [Learn how to invite others](/collaboration/invite.mdx) to a project +- Read about the [best practices](/collaboration/best-practices.mdx) for developing as a team diff --git a/docs/getting-started/collaborate/invite.mdx b/docs/collaboration/invite.mdx similarity index 98% rename from docs/getting-started/collaborate/invite.mdx rename to docs/collaboration/invite.mdx index b5fb1da09..d79385d0b 100644 --- a/docs/getting-started/collaborate/invite.mdx +++ b/docs/collaboration/invite.mdx @@ -107,7 +107,7 @@ From your new project, you can explore the console by: The owner of the project most likely has a Git repository with the project's contents available on a service such as GitHub. To run the supergraph locally, and make contributions to the deployed supergraph, -[pick up here](/getting-started/build/build-your-api) in our getting started docs. +[pick up here](/getting-started/quickstart.mdx) in our getting started docs. ## Allow users to request access {#request-access} diff --git a/docs/collaboration/overview.mdx b/docs/collaboration/overview.mdx new file mode 100644 index 000000000..79d40eefa --- /dev/null +++ b/docs/collaboration/overview.mdx @@ -0,0 +1,53 @@ +--- +sidebar_position: 1 +sidebar_label: Overview +description: + "Hasura DDN allows multiple users and teams to work together as collaborators on projects by assigning each user specific roles and permissions." +keywords: + - hasura ddn + - graphql api +hide_table_of_contents: true +--- + +import { OverviewTopSectionIconNoVideo } from "@site/src/components/OverviewTopSectionIconNoVideo"; +import { OverviewPlainCard } from "@site/src/components/OverviewPlainCard"; +import Icon from '@site/static/icons/features/collaborators.svg'; + +# Collaboration + +} + links={[]} + intro={ +
+

+ Hasura DDN allows multiple users and teams to work together as collaborators on projects by assigning each user specific roles and permissions. +

+
+ } +/> + +
+ + + + + + + +
diff --git a/docs/connectors/clickhouse/index.mdx b/docs/connectors/clickhouse/index.mdx index 5c7064251..266048f29 100644 --- a/docs/connectors/clickhouse/index.mdx +++ b/docs/connectors/clickhouse/index.mdx @@ -27,7 +27,7 @@ overview of the features of the ClickHouse connector and how to configure it in ## Getting started To get started with ClickHouse and Hasura DDN, check out -[this guide](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx?db=ClickHouse). +[this guide](/getting-started/quickstart.mdx). :::info Prerequisites diff --git a/docs/connectors/external-apis/graphql.mdx b/docs/connectors/external-apis/graphql.mdx index cfda57b08..2c6bd9aaf 100644 --- a/docs/connectors/external-apis/graphql.mdx +++ b/docs/connectors/external-apis/graphql.mdx @@ -54,7 +54,7 @@ Below, you'll find a matrix of all supported features for the GraphQL connector: ## Getting started To get started with the GraphQL Data Connector, you can follow the -[guide here](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx?db=GraphQL). +[guide here](/getting-started/quickstart.mdx). :::info Prerequisites diff --git a/docs/connectors/external-apis/open-api.mdx b/docs/connectors/external-apis/open-api.mdx index e66e1d4a2..cc59e3c26 100644 --- a/docs/connectors/external-apis/open-api.mdx +++ b/docs/connectors/external-apis/open-api.mdx @@ -37,7 +37,7 @@ mutate data. ## Getting started To get started with the OpenAPI Lambda Connector, you can follow the -[guide here](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx?db=OpenAPI). +[guide here](/getting-started/quickstart.mdx). :::info Prerequisites diff --git a/docs/connectors/mongodb/index.mdx b/docs/connectors/mongodb/index.mdx index aa3429879..dede0efbf 100644 --- a/docs/connectors/mongodb/index.mdx +++ b/docs/connectors/mongodb/index.mdx @@ -37,7 +37,7 @@ configuration, the connector supports introspecting the database via the Hasura ## Getting started To get started with MongoDB and Hasura DDN, check out -[this guide](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx?db=MongoDB). +[this guide](/getting-started/quickstart.mdx). :::info Prerequisites diff --git a/docs/connectors/postgresql/index.mdx b/docs/connectors/postgresql/index.mdx index 8a5474183..d30760088 100644 --- a/docs/connectors/postgresql/index.mdx +++ b/docs/connectors/postgresql/index.mdx @@ -54,7 +54,7 @@ Our policy is to support all versions which are indicated as being supported in ## Getting started To get started with PostgreSQL and Hasura DDN, check out -[this guide](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx?db=PostgreSQL). +[this guide](/getting-started/quickstart.mdx). :::info Prerequisites diff --git a/docs/connectors/postgresql/local-postgres.mdx b/docs/connectors/postgresql/local-postgres.mdx index 09095a069..bf477c040 100644 --- a/docs/connectors/postgresql/local-postgres.mdx +++ b/docs/connectors/postgresql/local-postgres.mdx @@ -114,7 +114,7 @@ You can now create tables, add data, etc. 
on the Postgres database and iterate o You can connect the Postgres database to your supergraph using the [Postgres data connector](connectors/postgresql/index.mdx) by following the steps in -[this guide](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx?db=PostgreSQL) and providing the database +[this guide](/getting-started/quickstart.mdx) and providing the database connection string as the following: ```bash title="Connection string to connect to local supergraph" @@ -122,7 +122,7 @@ postgresql://user:password@local.hasura.dev:5432/dev ``` As you iterate on your database schema you can update your supergraph metadata using the steps in -[this guide](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx). +[this guide](/getting-started/quickstart.mdx). :::warning Connecting to your local database from the cloud diff --git a/docs/connectors/postgresql/native-operations/custom-mutations.mdx b/docs/connectors/postgresql/native-operations/custom-mutations.mdx index 47ae6cad7..c5b88e265 100644 --- a/docs/connectors/postgresql/native-operations/custom-mutations.mdx +++ b/docs/connectors/postgresql/native-operations/custom-mutations.mdx @@ -67,7 +67,7 @@ CLI to add it to the connector configuration. For example: ``` 4. Add a source entity as described in our - [add source entities](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx) guide. + [add source entities](/getting-started/quickstart.mdx) guide. ## List Native Operations diff --git a/docs/connectors/postgresql/native-operations/custom-queries.mdx b/docs/connectors/postgresql/native-operations/custom-queries.mdx index 05ffaf086..0de27cd9c 100644 --- a/docs/connectors/postgresql/native-operations/custom-queries.mdx +++ b/docs/connectors/postgresql/native-operations/custom-queries.mdx @@ -51,7 +51,7 @@ to add it to the connector configuration. For example: ``` 4. Add a source entity as described in our - [add source entities](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx) guide. + [add source entities](/getting-started/quickstart.mdx) guide. ## List Native Operations diff --git a/docs/connectors/postgresql/native-operations/vector-search.mdx b/docs/connectors/postgresql/native-operations/vector-search.mdx index 1d36c016c..3300dcd2e 100644 --- a/docs/connectors/postgresql/native-operations/vector-search.mdx +++ b/docs/connectors/postgresql/native-operations/vector-search.mdx @@ -72,9 +72,8 @@ relationship from this projection to the full products table and query it flexib ### Hasura metadata First, you would need to track the newly-available collection from the step above as types / models in your project's -metadata ([see instructions here](/docs/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx)). Then, -you would need to add a relationship from the generated `ProductsVectorDistance` type to the main `Products` model, like -this: +metadata ([see instructions here](/getting-started/quickstart.mdx)). 
Then, you would need to add a relationship from the +generated `ProductsVectorDistance` type to the main `Products` model, like this: ```yaml kind: Relationship diff --git a/docs/getting-started/deployment/01-create-a-project.mdx b/docs/deployment/deployment/01-create-a-project.mdx similarity index 97% rename from docs/getting-started/deployment/01-create-a-project.mdx rename to docs/deployment/deployment/01-create-a-project.mdx index ceb79c87f..5b64168d6 100644 --- a/docs/getting-started/deployment/01-create-a-project.mdx +++ b/docs/deployment/deployment/01-create-a-project.mdx @@ -24,8 +24,8 @@ Eventually, this can be used to serve your API. :::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [supergraph](/getting-started/quickstart.mdx) ::: diff --git a/docs/getting-started/deployment/02-create-a-subgraph.mdx b/docs/deployment/deployment/02-create-a-subgraph.mdx similarity index 84% rename from docs/getting-started/deployment/02-create-a-subgraph.mdx rename to docs/deployment/deployment/02-create-a-subgraph.mdx index 987e20e68..ff777d63e 100644 --- a/docs/getting-started/deployment/02-create-a-subgraph.mdx +++ b/docs/deployment/deployment/02-create-a-subgraph.mdx @@ -23,8 +23,8 @@ commands, relationships, or permissions. :::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [project](/deployment/deployment/01-create-a-project.mdx) ::: @@ -49,4 +49,4 @@ Thus, for each subgraph you have in local metadata, you'll create the companion ## Next steps With a subgraph provisioned on Hasura DDN, you can now begin -[building and deploying your connectors](/getting-started/deployment/deploy-a-connector) that service this subgraph. +[building and deploying your connectors](/deployment/deployment/deploy-a-connector) that service this subgraph. diff --git a/docs/getting-started/deployment/03-deploy-a-connector.mdx b/docs/deployment/deployment/03-deploy-a-connector.mdx similarity index 100% rename from docs/getting-started/deployment/03-deploy-a-connector.mdx rename to docs/deployment/deployment/03-deploy-a-connector.mdx diff --git a/docs/getting-started/deployment/04-deploy-your-supergraph.mdx b/docs/deployment/deployment/04-deploy-your-supergraph.mdx similarity index 89% rename from docs/getting-started/deployment/04-deploy-your-supergraph.mdx rename to docs/deployment/deployment/04-deploy-your-supergraph.mdx index 3f52811ec..1c5d26eed 100644 --- a/docs/getting-started/deployment/04-deploy-your-supergraph.mdx +++ b/docs/deployment/deployment/04-deploy-your-supergraph.mdx @@ -25,11 +25,11 @@ service! 
:::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A new or existing [data connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [supergraph](/getting-started/quickstart.mdx) +- A new or existing [subgraph](/getting-started/quickstart.mdx) +- A new or existing [data connector](/getting-started/quickstart.mdx) +- A new or existing [project](/deployment/deployment/01-create-a-project.mdx) ::: diff --git a/docs/deployment/deployment/_category_.json b/docs/deployment/deployment/_category_.json new file mode 100644 index 000000000..63bb65b10 --- /dev/null +++ b/docs/deployment/deployment/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Hasura Cloud Deployment", + "position": 4 +} diff --git a/docs/getting-started/deployment/_databaseDocs/_clickHouse/_03-deploy-a-connector.mdx b/docs/deployment/deployment/_databaseDocs/_clickHouse/_03-deploy-a-connector.mdx similarity index 82% rename from docs/getting-started/deployment/_databaseDocs/_clickHouse/_03-deploy-a-connector.mdx rename to docs/deployment/deployment/_databaseDocs/_clickHouse/_03-deploy-a-connector.mdx index f1f2f52b8..10706a62f 100644 --- a/docs/getting-started/deployment/_databaseDocs/_clickHouse/_03-deploy-a-connector.mdx +++ b/docs/deployment/deployment/_databaseDocs/_clickHouse/_03-deploy-a-connector.mdx @@ -12,12 +12,12 @@ follow the steps below.** :::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A new or existing [data connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- A new or existing [subgraph on Hasura DDN](/getting-started/deployment/02-create-a-subgraph.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [supergraph](/getting-started/quickstart.mdx) +- A new or existing [subgraph](/getting-started/quickstart.mdx) +- A new or existing [data connector](/getting-started/quickstart.mdx) +- A new or existing [project](/deployment/deployment/01-create-a-project.mdx) +- A new or existing [subgraph on Hasura DDN](/deployment/deployment/02-create-a-subgraph.mdx) ::: @@ -76,5 +76,5 @@ is reachable by DDN, your connector will be able to communicate between your API ## Next steps If you have other connectors needed for a supergraph, repeat these steps for each connector. Otherwise, you're ready to -[create a new build of your supergraph](/getting-started/deployment/04-deploy-your-supergraph.mdx) and deploy it to -Hasura DDN! +[create a new build of your supergraph](/deployment/deployment/04-deploy-your-supergraph.mdx) and deploy it to Hasura +DDN! 
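The deploy-a-connector page above (and the language-specific variants that follow) all converge on the same CLI step. A rough sketch, assuming the `ddn connector build create` subcommand; the configuration path is a placeholder for your own project layout:

```sh
# Sketch: build and deploy a single connector to Hasura DDN. The path
# below is a placeholder for your connector's configuration file;
# verify the subcommand and flags with `ddn --help`.
ddn connector build create \
  --connector ./app/connector/my_clickhouse/connector.yaml
```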
diff --git a/docs/getting-started/deployment/_databaseDocs/_go/_03-deploy-a-connector.mdx b/docs/deployment/deployment/_databaseDocs/_go/_03-deploy-a-connector.mdx similarity index 70% rename from docs/getting-started/deployment/_databaseDocs/_go/_03-deploy-a-connector.mdx rename to docs/deployment/deployment/_databaseDocs/_go/_03-deploy-a-connector.mdx index df4ba4979..c1c3842de 100644 --- a/docs/getting-started/deployment/_databaseDocs/_go/_03-deploy-a-connector.mdx +++ b/docs/deployment/deployment/_databaseDocs/_go/_03-deploy-a-connector.mdx @@ -3,8 +3,8 @@ import Thumbnail from "@site/src/components/Thumbnail"; ## What's about to happen? Connectors are deployed independently of your supergraph and of each other. Hasura DDN will host your connectors for -you, ensuring rapid delivery of data from your API to your consumers. **For each Go connector in a project, -follow the steps below.** +you, ensuring rapid delivery of data from your API to your consumers. **For each Go connector in a project, follow the +steps below.** @@ -12,12 +12,12 @@ follow the steps below.** :::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A new or existing [data connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- A new or existing [subgraph on Hasura DDN](/getting-started/deployment/02-create-a-subgraph.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [supergraph](/getting-started/quickstart.mdx) +- A new or existing [subgraph](/getting-started/quickstart.mdx) +- A new or existing [data connector](/getting-started/quickstart.mdx) +- A new or existing [project](/deployment/deployment/01-create-a-project.mdx) +- A new or existing [subgraph on Hasura DDN](/deployment/deployment/02-create-a-subgraph.mdx) ::: @@ -51,11 +51,11 @@ You're now ready to build and deploy your supergraph! ## What did this do? -The steps above built and deployed your Go connector to Hasura DDN. This means you can now use this connector to -enrich or transform data on your DDN-hosted supergraph πŸŽ‰ +The steps above built and deployed your Go connector to Hasura DDN. This means you can now use this connector to enrich +or transform data on your DDN-hosted supergraph πŸŽ‰ ## Next steps If you have other connectors needed for a supergraph, repeat these steps for each connector. Otherwise, you're ready to -[create a new build of your supergraph](/getting-started/deployment/04-deploy-your-supergraph.mdx) and deploy it to -Hasura DDN! +[create a new build of your supergraph](/deployment/deployment/04-deploy-your-supergraph.mdx) and deploy it to Hasura +DDN! 
diff --git a/docs/getting-started/deployment/_databaseDocs/_graphql/_03-deploy-a-connector.mdx b/docs/deployment/deployment/_databaseDocs/_graphql/_03-deploy-a-connector.mdx similarity index 75% rename from docs/getting-started/deployment/_databaseDocs/_graphql/_03-deploy-a-connector.mdx rename to docs/deployment/deployment/_databaseDocs/_graphql/_03-deploy-a-connector.mdx index 2ee47ed0b..7fc5a2fc8 100644 --- a/docs/getting-started/deployment/_databaseDocs/_graphql/_03-deploy-a-connector.mdx +++ b/docs/deployment/deployment/_databaseDocs/_graphql/_03-deploy-a-connector.mdx @@ -12,12 +12,12 @@ the steps below.** :::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A new or existing [data connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- A new or existing [subgraph on Hasura DDN](/getting-started/deployment/02-create-a-subgraph.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [supergraph](/getting-started/quickstart.mdx) +- A new or existing [subgraph](/getting-started/quickstart.mdx) +- A new or existing [data connector](/getting-started/quickstart.mdx) +- A new or existing [project](/deployment/deployment/01-create-a-project.mdx) +- A new or existing [subgraph on Hasura DDN](/deployment/deployment/02-create-a-subgraph.mdx) ::: @@ -55,5 +55,5 @@ The steps above built and deployed your GraphQL connector to Hasura DDN. ## Next steps If you have other connectors needed for a supergraph, repeat these steps for each connector. Otherwise, you're ready to -[create a new build of your supergraph](/getting-started/deployment/04-deploy-your-supergraph.mdx) and deploy it to -Hasura DDN! +[create a new build of your supergraph](/deployment/deployment/04-deploy-your-supergraph.mdx) and deploy it to Hasura +DDN! 
diff --git a/docs/getting-started/deployment/_databaseDocs/_mongoDB/_03-deploy-a-connector.mdx b/docs/deployment/deployment/_databaseDocs/_mongoDB/_03-deploy-a-connector.mdx similarity index 82% rename from docs/getting-started/deployment/_databaseDocs/_mongoDB/_03-deploy-a-connector.mdx rename to docs/deployment/deployment/_databaseDocs/_mongoDB/_03-deploy-a-connector.mdx index f7ce40a86..cf9d0817a 100644 --- a/docs/getting-started/deployment/_databaseDocs/_mongoDB/_03-deploy-a-connector.mdx +++ b/docs/deployment/deployment/_databaseDocs/_mongoDB/_03-deploy-a-connector.mdx @@ -12,12 +12,12 @@ the steps below.** :::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A new or existing [data connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- A new or existing [subgraph on Hasura DDN](/getting-started/deployment/02-create-a-subgraph.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [supergraph](/getting-started/quickstart.mdx) +- A new or existing [subgraph](/getting-started/quickstart.mdx) +- A new or existing [data connector](/getting-started/quickstart.mdx) +- A new or existing [project](/deployment/deployment/01-create-a-project.mdx) +- A new or existing [subgraph on Hasura DDN](/deployment/deployment/02-create-a-subgraph.mdx) ::: @@ -75,5 +75,5 @@ reachable by DDN, your connector will be able to communicate between your API an ## Next steps If you have other connectors needed for a supergraph, repeat these steps for each connector. Otherwise, you're ready to -[create a new build of your supergraph](/getting-started/deployment/04-deploy-your-supergraph.mdx) and deploy it to -Hasura DDN! +[create a new build of your supergraph](/deployment/deployment/04-deploy-your-supergraph.mdx) and deploy it to Hasura +DDN! 
diff --git a/docs/getting-started/deployment/_databaseDocs/_openAPI/_03-deploy-a-connector.mdx b/docs/deployment/deployment/_databaseDocs/_openAPI/_03-deploy-a-connector.mdx similarity index 82% rename from docs/getting-started/deployment/_databaseDocs/_openAPI/_03-deploy-a-connector.mdx rename to docs/deployment/deployment/_databaseDocs/_openAPI/_03-deploy-a-connector.mdx index 34d3ceeb9..af180903b 100644 --- a/docs/getting-started/deployment/_databaseDocs/_openAPI/_03-deploy-a-connector.mdx +++ b/docs/deployment/deployment/_databaseDocs/_openAPI/_03-deploy-a-connector.mdx @@ -12,12 +12,12 @@ the steps below.** :::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A new or existing [data connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- A new or existing [subgraph on Hasura DDN](/getting-started/deployment/02-create-a-subgraph.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [supergraph](/getting-started/quickstart.mdx) +- A new or existing [subgraph](/getting-started/quickstart.mdx) +- A new or existing [data connector](/getting-started/quickstart.mdx) +- A new or existing [project](/deployment/deployment/01-create-a-project.mdx) +- A new or existing [subgraph on Hasura DDN](/deployment/deployment/02-create-a-subgraph.mdx) ::: @@ -79,5 +79,5 @@ reachable by DDN, your connector will be able to communicate between your API an ## Next steps If you have other connectors needed for a supergraph, repeat these steps for each connector. Otherwise, you're ready to -[create a new build of your supergraph](/getting-started/deployment/04-deploy-your-supergraph.mdx) and deploy it to -Hasura DDN! +[create a new build of your supergraph](/deployment/deployment/04-deploy-your-supergraph.mdx) and deploy it to Hasura +DDN! 
diff --git a/docs/getting-started/deployment/_databaseDocs/_postgreSQL/_03-deploy-a-connector.mdx b/docs/deployment/deployment/_databaseDocs/_postgreSQL/_03-deploy-a-connector.mdx similarity index 79% rename from docs/getting-started/deployment/_databaseDocs/_postgreSQL/_03-deploy-a-connector.mdx rename to docs/deployment/deployment/_databaseDocs/_postgreSQL/_03-deploy-a-connector.mdx index 4130968a8..78687a62b 100644 --- a/docs/getting-started/deployment/_databaseDocs/_postgreSQL/_03-deploy-a-connector.mdx +++ b/docs/deployment/deployment/_databaseDocs/_postgreSQL/_03-deploy-a-connector.mdx @@ -12,12 +12,12 @@ follow the steps below.** :::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A new or existing [data connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- A new or existing [subgraph on Hasura DDN](/getting-started/deployment/02-create-a-subgraph.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [supergraph](/getting-started/quickstart.mdx) +- A new or existing [subgraph](/getting-started/quickstart.mdx) +- A new or existing [data connector](/getting-started/quickstart.mdx) +- A new or existing [project](/deployment/deployment/01-create-a-project.mdx) +- A new or existing [subgraph on Hasura DDN](/deployment/deployment/02-create-a-subgraph.mdx) ::: @@ -66,5 +66,5 @@ is reachable by DDN, your connector will be able to communicate between your API ## Next steps If you have other connectors needed for a supergraph, repeat these steps for each connector. Otherwise, you're ready to -[create a new build of your supergraph](/getting-started/deployment/04-deploy-your-supergraph.mdx) and deploy it to -Hasura DDN! +[create a new build of your supergraph](/deployment/deployment/04-deploy-your-supergraph.mdx) and deploy it to Hasura +DDN! 
diff --git a/docs/getting-started/deployment/_databaseDocs/_python/_03-deploy-a-connector.mdx b/docs/deployment/deployment/_databaseDocs/_python/_03-deploy-a-connector.mdx similarity index 75% rename from docs/getting-started/deployment/_databaseDocs/_python/_03-deploy-a-connector.mdx rename to docs/deployment/deployment/_databaseDocs/_python/_03-deploy-a-connector.mdx index bd1c63990..c60aedf20 100644 --- a/docs/getting-started/deployment/_databaseDocs/_python/_03-deploy-a-connector.mdx +++ b/docs/deployment/deployment/_databaseDocs/_python/_03-deploy-a-connector.mdx @@ -12,12 +12,12 @@ the steps below.** :::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A new or existing [data connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- A new or existing [subgraph on Hasura DDN](/getting-started/deployment/02-create-a-subgraph.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [supergraph](/getting-started/quickstart.mdx) +- A new or existing [subgraph](/getting-started/quickstart.mdx) +- A new or existing [data connector](/getting-started/quickstart.mdx) +- A new or existing [project](/deployment/deployment/01-create-a-project.mdx) +- A new or existing [subgraph on Hasura DDN](/deployment/deployment/02-create-a-subgraph.mdx) ::: @@ -50,5 +50,5 @@ enrich or transform data on your DDN-hosted supergraph πŸŽ‰ ## Next steps If you have other connectors needed for a supergraph, repeat these steps for each connector. Otherwise, you're ready to -[create a new build of your supergraph](/getting-started/deployment/04-deploy-your-supergraph.mdx) and deploy it to -Hasura DDN! +[create a new build of your supergraph](/deployment/deployment/04-deploy-your-supergraph.mdx) and deploy it to Hasura +DDN! 
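Each of these connector guides ends at the same next step: creating a new build of the supergraph. A minimal sketch, assuming the `ddn supergraph build` subcommands; the build version is a placeholder for the value printed by the create step:

```sh
# Sketch: create a cloud build of the supergraph, then apply it so it
# serves the project's API. <build-version> is a placeholder for the
# version string printed by the create step.
ddn supergraph build create
ddn supergraph build apply <build-version>
```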
diff --git a/docs/getting-started/deployment/_databaseDocs/_typeScript/_03-deploy-a-connector.mdx b/docs/deployment/deployment/_databaseDocs/_typeScript/_03-deploy-a-connector.mdx similarity index 76% rename from docs/getting-started/deployment/_databaseDocs/_typeScript/_03-deploy-a-connector.mdx rename to docs/deployment/deployment/_databaseDocs/_typeScript/_03-deploy-a-connector.mdx index ec23beb0e..d3d9e617c 100644 --- a/docs/getting-started/deployment/_databaseDocs/_typeScript/_03-deploy-a-connector.mdx +++ b/docs/deployment/deployment/_databaseDocs/_typeScript/_03-deploy-a-connector.mdx @@ -12,12 +12,12 @@ follow the steps below.** :::tip Required -- [DDN CLI](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A new or existing [data connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- A new or existing [subgraph on Hasura DDN](/getting-started/deployment/02-create-a-subgraph.mdx) +- [DDN CLI](/getting-started/quickstart.mdx) +- A new or existing [supergraph](/getting-started/quickstart.mdx) +- A new or existing [subgraph](/getting-started/quickstart.mdx) +- A new or existing [data connector](/getting-started/quickstart.mdx) +- A new or existing [project](/deployment/deployment/01-create-a-project.mdx) +- A new or existing [subgraph on Hasura DDN](/deployment/deployment/02-create-a-subgraph.mdx) ::: @@ -57,5 +57,5 @@ enrich or transform data on your DDN-hosted supergraph πŸŽ‰ ## Next steps If you have other connectors needed for a supergraph, repeat these steps for each connector. Otherwise, you're ready to -[create a new build of your supergraph](/getting-started/deployment/04-deploy-your-supergraph.mdx) and deploy it to -Hasura DDN! +[create a new build of your supergraph](/deployment/deployment/04-deploy-your-supergraph.mdx) and deploy it to Hasura +DDN! diff --git a/docs/getting-started/deployment/index.mdx b/docs/deployment/deployment/index.mdx similarity index 91% rename from docs/getting-started/deployment/index.mdx rename to docs/deployment/deployment/index.mdx index 7785ba130..fb173de6d 100644 --- a/docs/getting-started/deployment/index.mdx +++ b/docs/deployment/deployment/index.mdx @@ -1,6 +1,6 @@ --- sidebar_position: 6 -sidebar_label: Deploy a supergraph to Hasura DDN +sidebar_label: Hasura Cloud description: "Learn how to get started with Hasura DDN and your GraphQL API." keywords: - hasura ddn @@ -10,7 +10,7 @@ keywords: - guide --- -# Deploy +# Hasura Cloud To deploy your API, you need to collectively deploy your connectors and supergraph. Hasura DDN can host these for you, enabling easy management via the Hasura Console on Hasura DDN.
We'll follow these steps over the next several pages: diff --git a/docs/deployment/private/_category_.json b/docs/deployment/private/_category_.json index e8108404b..7af4dd900 100644 --- a/docs/deployment/private/_category_.json +++ b/docs/deployment/private/_category_.json @@ -1,4 +1,4 @@ { "label": "Private Deployment", - "position": 4 + "position": 5 } diff --git a/docs/deployment/private/data-plane-collaboration.mdx b/docs/deployment/private/data-plane-collaboration.mdx index 9d43177a8..f63976382 100644 --- a/docs/deployment/private/data-plane-collaboration.mdx +++ b/docs/deployment/private/data-plane-collaboration.mdx @@ -1,8 +1,7 @@ --- sidebar_position: 4 sidebar_label: Data Plane Collaboration -description: - "Learn how to use the data plane collaboration feature for Private DDN." +description: "Learn how to use the data plane collaboration feature for Private DDN." keywords: - hasura ddn - private ddn @@ -17,47 +16,50 @@ import Thumbnail from "@site/src/components/Thumbnail"; ## Introduction -The Data Plane Collaboration feature allows you to manage and facilitate collaboration on the [data plane](/deployment/architecture.mdx#data-plane) -for which you are an owner. The owner of the data plane can invite other users to collaborate/create projects on the data plane. -The invited users can create and manage projects on the data plane. This feature enables users to invite, accept, -reject, and remove collaborators for cross-team collaboration. +The Data Plane Collaboration feature allows you to manage and facilitate collaboration on the +[data plane](/deployment/architecture.mdx#data-plane) for which you are an owner. The owner of the data plane can invite +other users to collaborate/create projects on the data plane. The invited users can create and manage projects on the +data plane. This feature enables users to invite, accept, reject, and remove collaborators for cross-team collaboration. ### How to get started with Data Plane Collaboration #### Inviting a collaborator -To invite a user to your data plane, you need to open the [Data Plane Management Dashboard](https://console.hasura.io/data-plane/). The -dashboard will show all available data planes. Select the data plane for which you have the `owner` role. Click `Invite -Collaborator`. Enter the email address of the user you want to invite and click `Send Invites`. The invited user -will receive an email with an invitation link. +To invite a user to your data plane, you need to open the +[Data Plane Management Dashboard](https://console.hasura.io/data-plane/). The dashboard will show all available data +planes. Select the data plane for which you have the `owner` role. Click `Invite Collaborator`. Enter the email address +of the user you want to invite and click `Send Invites`. The invited user will receive an email with an invitation link. #### Accepting or Rejecting an invitation -The invited user can accept or reject the invitation by clicking on the invitation link received in the email or going to the -[Data Plane Management Dashboard](https://console.hasura.io/data-plane/). The dashboard will show all the invites received by the user. The -user can accept or reject the invitation by clicking `Accept` or `Decline`. +The invited user can accept or reject the invitation by clicking on the invitation link received in the email or going +to the [Data Plane Management Dashboard](https://console.hasura.io/data-plane/). The dashboard will show all the invites +received by the user. 
The user can accept or reject the invitation by clicking `Accept` or `Decline`.

 #### Removing a collaborator

-You can remove any data plane collaborator by going to the [Data Plane Management Dashboard](https://console.hasura.io/data-plane/). Select the data plane
-for which you have the `owner` role, you'll be able to see all the collaborators of the data plane. Click `Remove` to remove the collaborator.
+You can remove any data plane collaborator by going to the
+[Data Plane Management Dashboard](https://console.hasura.io/data-plane/). Select the data plane for which you have the
+`owner` role; you'll be able to see all the collaborators of the data plane. Click `Remove` to remove the collaborator.

 ### Data Plane Collaboration permissions

 Currently there are only two roles available for data plane collaboration:
+
 - `owner`: The owner of the data plane has full access to the data plane and can invite or remove collaborators.
 - `member`: The member of the data plane can create projects on the data plane and manage them.

 ### What can a collaborator do?

-A collaborator can create projects on the data plane and manage them. To create the project in a data plane, the collaborator needs to
-select the data plane while creating the project using the [Hasura DDN CLI](/getting-started/deployment/create-a-project/).
+A collaborator can create projects on the data plane and manage them. To create a project on a data plane, the
+collaborator needs to select the data plane while creating the project using the
+[Hasura DDN CLI](/deployment/deployment/create-a-project/).

 ```
 ddn project create --data-plane-id --plan
 ```
diff --git a/docs/deployment/private/self-hosted-deployment.mdx b/docs/deployment/private/self-hosted-deployment.mdx
index 46b6eeb24..cea606b27 100644
--- a/docs/deployment/private/self-hosted-deployment.mdx
+++ b/docs/deployment/private/self-hosted-deployment.mdx
@@ -21,11 +21,11 @@ Documentation here targets customers who want to self host and self manage their

 Before continuing, ensure you go through the following checklist and confirm that you meet all the requirements

-- [DDN CLI](/getting-started/build/prerequisites#step-1-install-and-authorize-the-hasura-cli) (Latest)
-- [Docker v2.27.1](/getting-started/build/prerequisites/#step-3-install-docker-compose-v2271-or-greater) (Or greater)
+- [DDN CLI](/getting-started/quickstart.mdx) (Latest)
+- [Docker v2.27.1](/getting-started/quickstart.mdx) (Or greater)
   - You can also run `ddn doctor` to confirm that you meet the minimum requirements.
 - [Helm3](https://helm.sh/docs/intro/install/) (Prefer latest)
-- [Hasura VS Code Extension](/getting-started/build/prerequisites#install-lsp) (Recommended, but not required)
+- [Hasura VS Code Extension](/getting-started/quickstart.mdx) (Recommended, but not required)
 - Access to a Kubernetes cluster
 - Ability to build and push images that can then be pulled down from the Kubernetes cluster
 - A user account on the Hasura DDN Control Plane
diff --git a/docs/deployment/serverless.mdx b/docs/deployment/serverless.mdx
index 180ac1cd6..20665a173 100644
--- a/docs/deployment/serverless.mdx
+++ b/docs/deployment/serverless.mdx
@@ -43,5 +43,4 @@ setting up any of this infrastructure yourself. Read more about how this works o

 ## Get started

 To get started with Hasura DDN in Serverless Edge deployment, head to our
-[getting started guide](getting-started/build/index.mdx).
-
+[getting started guide](/getting-started/quickstart.mdx).
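For readers following that pointer, the local loop the quickstart lands you in looks roughly like this; a sketch assembled from commands that appear elsewhere in this patch, with `mysupergraph` as a placeholder name:

```bash
ddn auth login                   # authenticate the CLI with Hasura DDN
ddn supergraph init mysupergraph # scaffold a new supergraph project
cd mysupergraph
ddn supergraph build local       # create a local build of the (still empty) API
ddn run docker-start             # start the engine and connectors via Docker Compose
ddn console --local              # open the local console against your running API
```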
diff --git a/docs/federation/_category_.json b/docs/federation/_category_.json index b0e0d23cb..a1c887904 100644 --- a/docs/federation/_category_.json +++ b/docs/federation/_category_.json @@ -1,5 +1,5 @@ { - "label": "Federation", + "label": "Data Federation", "position": 7, "className": "federation-icon", "customProps": { diff --git a/docs/federation/architecture.mdx b/docs/federation/architecture.mdx index bdcc82473..84523cbc8 100644 --- a/docs/federation/architecture.mdx +++ b/docs/federation/architecture.mdx @@ -28,9 +28,9 @@ Federation in Hasura DDN upgrades how you build and manage your API. It is the process of combining multiple subgraphs with multiple data sources into a single supergraph to create a unified GraphQL API that provides access to all your data domains through a single endpoint. -When coupled with the [collaboration](/getting-started/collaborate/invite.mdx) features in Hasura DDN, this -architecture enables more collaborative workflows and allows teams to independently develop and deploy subgraphs -while maintaining strong governance over the development process. +When coupled with the [collaboration](/collaboration/invite.mdx) features in Hasura DDN, this architecture +enables more collaborative workflows and allows teams to independently develop and deploy subgraphs while maintaining +strong governance over the development process. :::warning DDN Advanced plan required @@ -120,10 +120,11 @@ For a detailed overview of data connectors, check out our [Data Connectors Overv Defining a [relationship](/supergraph-modeling/relationships.mdx) allows you to make queries across linked information within and between subgraphs. -As always when authoring metadata, the -[Hasura VS Code extension](https://marketplace.visualstudio.com/items?itemName=HasuraHQ.hasura) can assist with -auto-complete and validation. When working with relationships across subgraphs in other repositories, there are some -differences to be aware of. Find out more about cross-repo relationships [here](/federation/cross-repo-relationships.mdx). +As always when authoring metadata, the +[Hasura VS Code extension](https://marketplace.visualstudio.com/items?itemName=HasuraHQ.hasura) can assist with +auto-complete and validation. When working with relationships across subgraphs in other repositories, there are some +differences to be aware of. Find out more about cross-repo relationships +[here](/federation/cross-repo-relationships.mdx). ## Example @@ -145,7 +146,7 @@ access to all data domains through a single GraphQL endpoint. ## Next steps -- Learn more about git workflows when building supergraphs in [single-repo or multi-repo](/federation/independent-subgraph-development.mdx) - setups. +- Learn more about git workflows when building supergraphs in + [single-repo or multi-repo](/federation/independent-subgraph-development.mdx) setups. - Learn more about [subgraph prefixing](/federation/subgraph-prefixing.mdx) to avoid naming collisions in the schema. - Learn more about [cross-repo relationships](/federation/cross-repo-relationships.mdx) to link data in subgraphs across diff --git a/docs/federation/basics.mdx b/docs/federation/basics.mdx index 141ad4d7f..31f52f58e 100644 --- a/docs/federation/basics.mdx +++ b/docs/federation/basics.mdx @@ -18,12 +18,12 @@ supergraph to create a unified GraphQL API that provides access to all your data With Hasura DDN's **collaboration** features, you can add collaborators to either the supergraph or subgraph level. 
-A supergraph API build is a collection of one or more subgraph builds.
+A supergraph API build is a collection of one or more subgraph builds.

 ## Add a subgraph to a supergraph

 To add a subgraph to a supergraph, you need to have a
-[supergraph project initialized](/getting-started/build/01-init-supergraph.mdx).
+[supergraph project initialized](/getting-started/quickstart.mdx).

 By running `ddn supergraph init .` you would have both an `app` and `globals` subgraph by default.

@@ -34,10 +34,10 @@ ddn subgraph init my_new_subgraph --dir ./my_new_subgraph --target-supergraph ./
 ```

 You can read more about adding a new subgraph to a supergraph in the
-[Getting Started guide](/getting-started/build/02-init-subgraph.mdx).
+[Getting Started guide](/getting-started/quickstart.mdx).

-Once a subgraph has been added and built on Hasura DDN,
-[subgraph collaborators](/getting-started/collaborate/invite.mdx) can be added to it.
+Once a subgraph has been added and built on Hasura DDN, [subgraph collaborators](/collaboration/invite.mdx)
+can be added to it.

 ## Add collaborators

@@ -45,7 +45,7 @@ As a project owner, or supergraph admin, you can invite collaborators to either

 As a subgraph admin, you can invite collaborators to your subgraph.

-Read more about [collaboration](/getting-started/collaborate/invite.mdx) and the available roles in Hasura DDN.
+Read more about [collaboration](/collaboration/invite.mdx) and the available roles in Hasura DDN.

 ## Next steps

diff --git a/docs/federation/independent-subgraph-development.mdx b/docs/federation/independent-subgraph-development.mdx
index f3ff703b2..043e27191 100644
--- a/docs/federation/independent-subgraph-development.mdx
+++ b/docs/federation/independent-subgraph-development.mdx
@@ -30,9 +30,9 @@ other teams. This is where independent subgraph, multi-repo federation shines on

 Subgraphs in this multi-repo federation are managed in their **own repository** for added governance, control and
 clean separation from other teams work. Users and teams can be added to individual subgraphs on Hasura DDN as
-[admins or developers](/getting-started/collaborate/invite.mdx), allowing them to work independently on their subgraph
-and data connectors without affecting other subgraphs. Subgraphs would also typically be named for and given
-responsibility over the data domain they encapsulate, such as `users`, `orders`, `products`, etc.
+[admins or developers](/collaboration/invite.mdx), allowing them to work independently on their subgraph and
+data connectors without affecting other subgraphs. Subgraphs would also typically be named for and given responsibility
+over the data domain they encapsulate, such as `users`, `orders`, `products`, etc.

 :::warning DDN Advanced plan required

@@ -53,8 +53,8 @@ with a multi-repo setup.
 - A local independent subgraph development project context will reference the DDN project used for collaboration.
 - Each subgraph is namespaced and internal metadata objects cannot conflict with other subgraphs. However, the GraphQL
   API is where the subgraphs meet and conflicts can occur with root field and type names. Prefixing subgraphs will
-  remedy this automatically or it can be managed manually.
-:::
+  remedy this automatically or it can be managed manually.
+:::

 ## Creating a new project with independent subgraph development

@@ -75,14 +75,14 @@ globals subgraph itself.

 :::

 1.
As a supergraph admin, [create a supergraph normally](/getting-started/quickstart.mdx), - [deploy](/getting-started/deployment/index.mdx) and apply on Hasura DDN, as per the - [getting started guide](/getting-started/build/index.mdx). + [deploy](/deployment/deployment/index.mdx) and apply on Hasura DDN, as per the + [getting started guide](/getting-started/quickstart.mdx). 2. Create any other subgraphs which other teams will work on as placeholders so that they can be invited to them. You can do this with [`ddn project subgraph create [flags]`](/cli/commands/ddn_subgraph_add.mdx) 3. Push the supergraph to a new Git repository. Do not add collaborators to this repo, they will be added to / create their own subgraph repositories. 4. Invite subgraph collaborators to the Hasura DDN project with - [Subgraph Admin or Subgraph Developer permissions](/getting-started/collaborate/invite.mdx). + [Subgraph Admin or Subgraph Developer permissions](/collaboration/invite.mdx). ### Joining the project @@ -92,7 +91,7 @@ the supergraph context to reference the collaborative project on DDN. The supergraph enables local development but only contains their subgraph. They can then create subgraph builds on the collaborative DDN project and, if permissions allow, apply the subgraph to the collaborative supergraph. -1. [Accept the invitation](/getting-started/collaborate/invite.mdx) to the Hasura DDN supergraph project. +1. [Accept the invitation](/collaboration/invite.mdx) to the Hasura DDN supergraph project. 2. Create a new Git repository for a new supergraph. 3. Initialize a new supergraph with a subgraph locally. Make sure the subgraph is named the same as the subgraph you were invited to work on. You can do this with: `ddn supergraph init . --create-subgraph ` @@ -123,8 +122,7 @@ single DDN project with independent subgraph development, you can do so by follo 1. Choose which project will be the main project and which will be the subgraph project. 2. In the main project, create a new subgraph placeholder for the subgraph project with [`ddn project subgraph create [flags]`](/cli/commands/ddn_subgraph_add.mdx) -3. On DDN, invite the subgraph project collaborators with - [subgraph permissions](/getting-started/collaborate/invite.mdx). +3. On DDN, invite the subgraph project collaborators with [subgraph permissions](/collaboration/invite.mdx). 4. Once the subgraph collaborators have accepted the invitation, they can [set the project context](/project-configuration/contexts/) to the main project with `ddn context set project `. 
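Stitching the steps above together, a subgraph collaborator's first session might look like the following sketch. The subgraph name `orders` and project name `acme-supergraph` are hypothetical; the commands are the ones referenced above:

```bash
# After accepting the invitation on Hasura DDN:
ddn supergraph init . --create-subgraph orders  # local supergraph containing only your subgraph
ddn context set project acme-supergraph         # point the local context at the collaborative DDN project
ddn supergraph build local                      # iterate locally before creating builds on DDN
```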
diff --git a/docs/federation/overview.mdx b/docs/federation/overview.mdx index ef9c585e1..a2416d030 100644 --- a/docs/federation/overview.mdx +++ b/docs/federation/overview.mdx @@ -14,7 +14,7 @@ import { OverviewTopSectionIconNoVideo } from "@site/src/components/OverviewTopS import { OverviewPlainCard } from "@site/src/components/OverviewPlainCard"; import Icon from "@site/static/icons/data_federation.svg"; -# Federation +# Data Federation } diff --git a/docs/getting-started/build/00-prerequisites.mdx b/docs/getting-started/build/00-prerequisites.mdx deleted file mode 100644 index f8546f9c6..000000000 --- a/docs/getting-started/build/00-prerequisites.mdx +++ /dev/null @@ -1,78 +0,0 @@ ---- -sidebar_position: 1.5 -sidebar_label: Prerequisites -keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide ---- - -import InstallTheCli from "@site/docs/_install-the-cli.mdx"; - -# Prerequisites - -Before continuing, ensure you have the following pieces of software at the correct minimum versions installed on your -machine: - -## Step 1. Install and authorize the Hasura CLI - - - -### Step 1.1 Login with the CLI - -```bash title="After installation, from any directory, run the following command to authenticate your CLI session with Hasura DDN." -ddn auth login -``` - -A new window will open either asking you to sign in or authenticating your current session and allow you to return to -the CLI. - -## Step 2. Install the Hasura VS Code extension {#install-lsp} - -If you don't already have [Visual Studio Code](https://code.visualstudio.com/download) installed, we recommend you -download and install it. Then, install the -[Hasura VS Code extension](https://marketplace.visualstudio.com/items?itemName=HasuraHQ.hasura) so that you can make use -of features like autocomplete, contextual suggestions, and inline validation to enhance your development experience. - -## Step 3. Install Docker Compose v2.27.1 or greater - -Finally, install [Docker](https://docs.docker.com/engine/install/), which will be used for local development. This helps -you iterate and develop locally without deploying any changes to Hasura DDN, making the development experience faster -and your feedback loops shorter. - -:::warning Docker version requirement - -Docker Compose `v2.27.1` or greater is required to run local services. To check your version of Docker Compose, run: - -```bash -docker compose version -``` - -[Click here](https://docs.docker.com/engine/install/) to download the latest version of Docker which includes Docker -Compose. - -If you have [Docker Desktop](https://www.docker.com/products/docker-desktop/) installed on your system you can upgrade -from inside the application. - -::: - -:::warning Console-compatible browsers - -Your browser settings, privacy tools or browser extensions may prevent the console from accessing your local Hasura -instance. This could be due to features designed to protect your privacy and security. Should you encounter one of these -issues, we recommend disabling these settings for the `console.hasura.io` domain. - -[Chrome](https://www.google.com/chrome/) and [Firefox](https://www.mozilla.org/en-US/firefox/new/) are the recommended -browsers for the best experience with the Hasura console, including for local development. - -::: - -## Next Steps - -At this point, you're ready to [create your first supergraph](/getting-started/build/01-init-supergraph.mdx)! 
- -**Alternatively**, if you've been invited to an _existing_ supergraph repository, you might want to -[add a new subgraph](./02-init-subgraph.mdx), [connect a data source](./03-connect-to-data/index.mdx), -[create a relationship](./07-create-a-relationship.mdx), or [add some business logic](./06-add-business-logic.mdx). diff --git a/docs/getting-started/build/01-init-supergraph.mdx b/docs/getting-started/build/01-init-supergraph.mdx deleted file mode 100644 index 9eff92808..000000000 --- a/docs/getting-started/build/01-init-supergraph.mdx +++ /dev/null @@ -1,156 +0,0 @@ ---- -sidebar_position: 2 -sidebar_label: Create a supergraph -description: "Learn how to get started with Hasura DDN and your GraphQL API." -keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide ---- - -import Thumbnail from "@site/src/components/Thumbnail"; - -# Create a Supergraph - -## What's about to happen? - -We're going to create a [**supergraph**](/support/glossary.mdx#supergraph). - -Your supergraph is the composite of all your subgraphs, their various data sources and business logic, and when we -"build" a supergraph, we create a GraphQL API. - - - -## Steps - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) - -::: - -### Step 1. Initialize your supergraph - -Your supergraph will be initialized in a new directory `mysupergraph`. Remember to change your working directory to this new directory for the following steps. - -```bash title="Run:" -ddn supergraph init mysupergraph -cd mysupergraph -``` - -### Step 2. Start your supergraph - -**With the Docker daemon running on your machine**, you can immediately start the Hasura Engine, your supergraph and its -accompanying services. - -```bash title="Run:" -ddn run docker-start -``` - -
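If the services come up cleanly, a minimal GraphQL request makes a quick smoke test. This sketch assumes the engine serves GraphQL on its default local port, 3280:

```bash
# Assumes the locally running engine serves GraphQL on port 3280
curl -s http://localhost:3280/graphql \
  -H 'Content-Type: application/json' \
  -d '{"query":"{ __typename }"}'
```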
- - The CLI and the `build` directory; click here to read how these work. - - The CLI automatically creates a `build` directory for you when running the `ddn supergraph init` command. This - directory contains all of the JSON necessary to configure and serve your supergraph. The JSON files within it are - already referenced for you in the docker `compose.yaml` file, allowing you to start your services straight away. - - The `docker-start` command is defined in your `hasura/context.yaml`. This command sets the `HASURA_DDN_PAT` - environment variable using a value retrieved from the command `ddn auth print-pat` and then uses Docker Compose to - start the Hasura services defined in the `compose.yaml` file. Your PAT is your personal access token generated by - Hasura DDN and used by the CLI. -
- -
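In shell terms, the `docker-start` script described in the collapsible above reduces to roughly this one-liner; an illustrative equivalent, not the literal contents of `hasura/context.yaml`:

```bash
# Inject the personal access token, then bring up the services defined in compose.yaml
HASURA_DDN_PAT=$(ddn auth print-pat) docker compose up --build
```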
- - Running the Docker daemon as a non-root user - - If you're running the Docker daemon as a non-root user (rootless mode), `v26.0` now allows you to disable the default - `host-loopback-isolation` which prevents communication back to the host machine. - -To disable this feature, set the environment variable `DOCKERD_ROOTLESS_ROOTLESSKIT_DISABLE_HOST_LOOPBACK` to `false` -before starting the daemon. - -This will now allow container-to-host communication via `10.0.2.2` address. - -You should also amend this line in your `compose.yaml` files for both the main services and connectors to reflect this -change: - -```yaml -extra_hosts: - - local.hasura.dev=host-gateway -``` - -to: - -```yaml -extra_hosts: - - local.hasura.dev=10.0.2.2 -``` - -
- -### Step 3. View your supergraph API - -In a new terminal window, you can launch your local project's console using the CLI. - -```bash title="From a new terminal window, run:" -ddn console --local -``` - -This will open [`https://console.hasura.io/local/graphql`](https://console.hasura.io/local/graphql) and allow you to -view your (still empty) API running on your own machine using the Hasura console! - -![Hasura Console showing an empty schema](/img/get-started/console-init-supergraph.png) - -:::tip Privacy settings in some browsers - -Your browser settings, privacy tools or browser extensions may prevent the Console from accessing your local Hasura -instance. This could be due to features designed to protect your privacy and security. Should you encounter one of these -issues, we recommend disabling these settings for the `console.hasura.io` domain. - -[Chrome](https://www.google.com/chrome/) and [Firefox](https://www.mozilla.org/en-US/firefox/new/) are the recommended -browsers for the best experience with the Hasura Console including for local development. - -::: - -### Step 4. Create a new build of your supergraph - -Hasura utilizes a system of [immutable builds](/project-configuration/builds.mdx) which provide a stable snapshot of -your API at any point in your development process. - -```bash title="Let's create a new one:" -ddn supergraph build local -``` - -The above command regenerates all the JSON for our supergraph. As we passed the `local` subcommand, this will output the -files to our local `./engine` directory by default, which is referenced in our Docker `compose.yaml`. We won't see any -difference to our deployed API from running this now, but once we start to add subgraphs and data sources we'll need to -run this command to update our API. - -As a feature of convenience, when initializing the supergraph, we also set the `context` in the CLI. This removes the -need to set frequently used default values (such as the flag `--supergraph` or `--subgraph`) when running CLI commands. - -You can learn more about using context [here](/cli/commands/ddn_context.mdx). - -## What did this do? - -The CLI created all the files needed for local and cloud development in your local project directory, including a docker -compose file for local development and helper files for VS Code. We then started the Hasura Engine and all the services -needed to run your supergraph locally and viewed the Hasura console to see our empty API. Finally, we set the context -for our supergraph and created a new β€” albeit empty β€” build of our supergraph. - -## Starting a new supergraph or joining an existing one - -`ddn supergraph init` will always be the first command you run when starting a new project. However, you may have -arrived here after being invited to join an existing project. In that case, your supergraph will already exist; you can -simply clone the repository containing your supergraph and pick up with the next section. - -Regardless of how you've arrived at your supergraph, you'll have needed to create a build and then run it using Docker. -This is the typical workflow when starting a new project or picking up with an existing one. - -## Next steps - -The most common next step from here is to [add a new subgraph](/getting-started/build/02-init-subgraph.mdx). 
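For the rootless-Docker scenario covered in the collapsible above, the daemon-side change amounts to a single environment variable. A sketch, assuming the rootless daemon is managed as a systemd user service:

```bash
# Allow container-to-host traffic via 10.0.2.2, as described above
export DOCKERD_ROOTLESS_ROOTLESSKIT_DISABLE_HOST_LOOPBACK=false
systemctl --user restart docker  # assumption: rootless dockerd runs as a systemd user service
```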
diff --git a/docs/getting-started/build/02-init-subgraph.mdx b/docs/getting-started/build/02-init-subgraph.mdx deleted file mode 100644 index 65a6ba1b0..000000000 --- a/docs/getting-started/build/02-init-subgraph.mdx +++ /dev/null @@ -1,162 +0,0 @@ ---- -sidebar_position: 3 -sidebar_label: Create a subgraph -description: "Learn how to get started with Hasura DDN and your GraphQL API." -keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide ---- - -import Thumbnail from "@site/src/components/Thumbnail"; - -# Create a Subgraph - -## What's about to happen? - -We're going to add a subgraph to our supergraph. - -A [**subgraph**](/project-configuration/subgraphs.mdx) is a way of organizing your data and allows you to connect -multiple data sources to your supergraph. A supergraph must have at least one subgraph, at least if you want it to do -anything. πŸ˜‰ - -:::info By default - -When you initialize a new supergraph, the CLI will create a default subgraph called `app`. However, you can follow this -guide to add subsequent subgraphs, extending your API. - -::: - - - -## Steps - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) - -::: - -### Step 1. Init a subgraph - -```bash title="Run the following command, passing the desired name and location to store the metadata for your subgraph:" -ddn subgraph init my_subgraph --dir ./my_subgraph --target-supergraph ./supergraph.yaml -``` - -The CLI will respond with a success message (and a hint to add a connector to the subgraph). - -We're passing a few values to the `subgraph init` command: - -**Subgraph name:** - -We're naming the subgraph `my_subgraph` in this example. - -**Directory: `--dir`** - -We're specifying the directory where the subgraph will be created. The CLI will create this directory. - -**Target supergraph: `--target-supergraph`** - -For convenience we're specifying the supergraph(s) to which this subgraph will be added. Here we're specifying both our -local and cloud supergraphs which were created when we initialized our supergraph. The CLI will edit these supergraph -configuration files and add details of our subgraph to both of them. - -#### What did `subgraph init` do? - -The `subgraph init` command created a new subgraph directory and a `subgraph.yaml` file in the root of the new subgraph -directory. - -The CLI also edited the supergraph configuration files to include the new subgraph in your supergraph. - -The subgraph is where we will be organizing metadata objects which define our API and without a data connector, our -subgraph is just a placeholder which doesn't do much, yet... - -:::info More about subgraphs - -##### Subgraph independence - -Subgraphs are used to maintain a logical separation of concerns and/or to allow different teams to work on different -parts of the supergraph at the same time without stepping on each other's toes. Usually, a subgraph is "owned" and -maintained by a team that is responsible for the data source, or more broadly: "data domain", that it connects to. And -by convention, one data domain is allocated to one subgraph. - -A subgraph is analogous to a microservice owned by a particular team. - -When we talk about data domains, we're talking about the collection of data and operations for one area of the business, -or team within the business. 
This could be a single database, a set of databases, a database and business logic, some -other 3rd party APIs, or a combination of all of these. - -Subgraphs can reference each other, allowing, for example, the creation of relationships between them. - -The supergraph is the composition of all subgraphs and their relationships, but it does not care how the metadata is -structured or organized into directories. That is up to the developer. The supergraph only cares that all metadata is -valid and can be compiled into an API build. The CLI and this documentation will suggest best practices for organizing -your metadata, but you can organize it however you choose. - -Subgraphs have their own permission models and development lifecycles separate to that of the supergraph. They can be -developed, tested, and built independently while the supergraph guarantees the integrity of subgraph composition. -Meaning that the supergraph will only build, and generate an API if all subgraphs are in a valid state. In this Getting -Started section we propose best-practice for organizing your subgraphs, but it can be done however you choose. - -##### Naming - -Let's say we're in an organization and the data source we'll be connecting to is for customer profile data. The team -responsible for this data and some services related to it is called the "Customer Data" team. As such, we could create a -`customer_data_team` subgraph, reflecting the team's name, indicating that they're the ones who will be working on it. -Or, we could choose to be more specific and name it `customer_profile` to reflect the data domain itself, if multiple -teams are going to work on it. - -Another example is, if we're working with a data source for a small project which we plan on using to contain all the -data for our whole project, and it's all in PostgreSQL, with some business logic in TypeScript, we could name it -`postgres_data` or `app` to keep it simple. This one subgraph would contain all the data domain specific metadata for -the whole project, at least for the time being. - -::: - -### Step 2. Set subgraph context - -Next, for convenience, we'll set the subgraph using the `context set` command in the CLI. This will simplify any -commands we use later on and save us from having to explicitly call out to which subgraph we're applying certain -actions. - -```bash title="Run the following in your project, changing the directory to match where your subgraph.yaml is located:" -ddn context set subgraph ./my_subgraph/subgraph.yaml -``` - -#### What did `context set subgraph` do? - -This command simply updates a key-value pair in a context file in a project's `.hasura` folder to set your current -subgraph to whatever value you passed. - -### Step 3. Customize prefixes - -To avoid collisions between GraphQL root fields and type names, you can optionally customize the prefixes for each -subgraph. For example, if two subgraphs both have a `Users` type, you can apply different prefixes to distinguish one -from the other. This ensures that each subgraph remains unique and prevents any naming conflicts. - -You can make these modifications in the `subgraph.yaml` file for a subgraph. - -```yaml title="Add the highlighted lines:" -kind: Subgraph -version: v2 -definition: - name: my_subgraph - generator: - rootPath: . - #highlight-start - graphqlRootFieldPrefix: my_subgraph_ - graphqlTypeNamePrefix: My_subgraph_ - #highlight-end -``` - -By default, the `subgraph.yaml` file is generated without any prefixes. 
You can read more about these fields -[here](supergraph-modeling/build-configs.mdx#subgraph-subgraphgeneratorconfig). - -## Next steps - -We now have a subgraph in which we can -[configure a data connector](/getting-started/build/03-connect-to-data/index.mdx). Data connectors will enable you to -connect external data sources to your supergraph. diff --git a/docs/getting-started/build/03-connect-to-data/01-connect-a-source.mdx b/docs/getting-started/build/03-connect-to-data/01-connect-a-source.mdx deleted file mode 100644 index a418483c1..000000000 --- a/docs/getting-started/build/03-connect-to-data/01-connect-a-source.mdx +++ /dev/null @@ -1,17 +0,0 @@ ---- -sidebar_position: 2 -sidebar_label: Connect a data source -description: "Learn how to get started with Hasura DDN and yourGraphQL API." -keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide ---- - -import { DatabaseContentLoader } from "@site/src/components/databaseDocs"; - -# Connect a Data Source - - diff --git a/docs/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx b/docs/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx deleted file mode 100644 index 2b2421e10..000000000 --- a/docs/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx +++ /dev/null @@ -1,104 +0,0 @@ ---- -sidebar_position: 3 -sidebar_label: Update data source metadata -description: "Learn how to get started with Hasura DDN and yourGraphQL API." -keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide ---- - -import { DatabaseContentLoader } from "@site/src/components/databaseDocs"; -import Thumbnail from "@site/src/components/Thumbnail"; - -# Update Data Source Metadata - -## What's about to happen? - -In your day-to-day development cycle, there's a good chance your data source's schema will change. When this happens, -you'll need to update your data connector configuration and the supergraph metadata to match these schema changes. - - - -## Step 1. Introspect the data source - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A [connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) initialized. - -::: - -We can run the `connector introspect` command to have the CLI introspect the data source schema, update the -configuration of the connector and also update the corresponding `DataConnectorLink` metadata object for the supergraph -to be able to interact with the connector. - -```bash title="Run the following command, updating the referenced name to match your connector:" -ddn connector introspect -``` - -After this command runs, you can open your `my_subgraph/metadata/my_connector.hml` file and see the `DataConnectorLink` -schema completely updated for you to match your data source's schema changes πŸŽ‰. - -## Step 2. Update or add models - -If an existing model's schema changed in your data source, update it to ensure your Hasura metadata matches the data -source's schema. - -```bash -ddn model update -``` - -:::info Have lots of models? - -If you have a large number of models and want to update them en masse, we've got you covered. 
- -```bash title="Run the following:" -ddn model update "*" -``` - -You'll see the CLI output information about which models are the same, and which have changed. - -::: - -Alternatively, if you have models which need to be added (e.g., a new table in your underlying data source), you'll need -to create `hml` files for these resources. - -```bash title="Run the following:" -ddn model add -``` - -:::info Have lots of new models to add? - -If you have a large number of models, commands or relationships and want to add them en masse, we've got you covered -just as before. - -```bash title="Run the following:" -ddn model add '*' -ddn command add '*' -ddn relationship add '*' -``` - -You'll see the CLI output information about which models are the same, and which have changed. - -::: - -## What did this do? - -By updating the `my_connector.hml` file, we've provided Hasura with a link between our original data source and the -types which we'll eventually expose via our API. - -```bash title="Be sure to create a new build before testing your API:" -ddn supergraph build local -``` - -## Next steps - -With our data source connected and all of our models tracked, we can move on to -[add custom authorization rules](/getting-started/build/05-add-permissions.mdx) using permissions, -[incorporate custom business logic](/getting-started/build/06-add-business-logic.mdx), or -[create relationships](/getting-started/build/07-create-a-relationship.mdx) across data sources! diff --git a/docs/getting-started/build/03-connect-to-data/_category_.json b/docs/getting-started/build/03-connect-to-data/_category_.json deleted file mode 100644 index 0158649ea..000000000 --- a/docs/getting-started/build/03-connect-to-data/_category_.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "label": "Connect to data", - "position": 4 -} diff --git a/docs/getting-started/build/03-connect-to-data/_databaseDocs/_clickHouse/_01-connect-a-source.mdx b/docs/getting-started/build/03-connect-to-data/_databaseDocs/_clickHouse/_01-connect-a-source.mdx deleted file mode 100644 index 0889bb39a..000000000 --- a/docs/getting-started/build/03-connect-to-data/_databaseDocs/_clickHouse/_01-connect-a-source.mdx +++ /dev/null @@ -1,168 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? - -We want to connect our [ClickHouse](https://clickhouse.com/) instance to our API. To do this, we use the Hasura -ClickHouse data connector to facilitate the connection and then introspect the database to generate JSON which the -Hasura CLI will then use to create metadata which can then define your API. - - - -## Step 1. Initialize the ClickHouse connector - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) - -::: - -To initialize the ClickHouse connector, with the appropriate subgraph set in context, run the following command in your -terminal: - -```bash title="Run the following command:" -ddn connector init my_clickhouse -i -``` - -- Select `hasura/clickhouse` from the list of connectors. -- Choose a port (press enter to accept the default recommended by the CLI). -- Enter your connection details. -- In this example, we've called the connector `my_clickhouse`. You can name it something descriptive. - -:::tip Best practices - -Importantly, a data connector can only connect to one data source. 
- -The project will be kept organized with each data connector's configuration located in a relevant subgraph directory. In -this example the CLI will create a `my_subgraph/connector/my_clickhouse` directory if it doesn't exist. You can also -change this directory by passing a `--dir` flag to the CLI. - -We recommend that the name of the connector and the directory in which the configuration is stored, `my_clickhouse` in -this example, should match for convenience and clarity sake. - -::: - -### What did `connector init` do? - -In the `my_subgraph/connector/my_clickhouse` directory, the CLI created: - -- A `connector.yaml` file which contains the local configuration for the connector. -- A `.hasura-connector` folder which contains the connector definition used to build and run it. -- A `compose.yaml` a file to run the ClickHouse data connector locally in Docker. -- A placeholder `.ddnignore` file to prevent unnecessary files from being included in the build. - -In the `my_subgraph/metadata` directory, the CLI created: - -- A `my_clickhouse.hml` file which contains the [`DataConnectorLink`](/supergraph-modeling/data-connector-links.mdx) - metadata object which describes how the supergraph can interact with the connector. - -Right now, the CLI has only scaffolded out configuration files for the data connector. Our connector still knows nothing -about the ClickHouse database or the data it contains. That's coming up in the next steps. - -You can use a local ClickHouse database or a cloud-hosted option. If you already have one you can connect to, you can go -ahead and do that using the steps above. Hasura DDN will not modify your database in any way, so you can use an existing -database without any worries. - -:::tip Docker networking Inside a Docker container - -`local.hasura.dev` is set to the `host-gateway` alias in the `extra_hosts` option. With this option set, -`local.hasura.dev` resolves to the host machine's gateway IP address from _inside_ the container. This allows various -containers, such as the GraphQL Engine and data connectors, to communicate with each other and out the host machine. - -::: - -:::info Environment-specific caveats - -**Local ClickHouse** - -If you're using a local ClickHouse database β€” such as through -[Docker](https://hub.docker.com/r/clickhouse/clickhouse-server/) β€” you can connect to it directly from the data -connector. However, if you deploy your connector to Hasura DDN, the cloud-hosted version of your data connector won't be -able to find your database. You'll need to use a tool like [ngrok](https://ngrok.com/) to tunnel your database's -connection. This will expose the port, most likely `9000`, on which the database is running and allow Hasura DDN to -connect to it. - -**Cloud-hosted ClickHouse** - -Alternatively, if you have a cloud-hosted database, as Hasura DDN will need to reach your database, ensure you've -allowed connections from anywhere (for now) so that DDN is able to reach it. - -::: - -## Step 2. Introspect your database - -With the connector configured, we can now use the CLI to introspect our ClickHouse database. This step will create a -data connector specific configuration file, and generate the necessary Hasura metadata which describes our API by -creating files for each table in our database. These tables will be tracked as -[Models](/supergraph-modeling/models.mdx). - -```bash title="Run the following command:" -ddn connector introspect my_clickhouse -``` - -:::tip Troubleshooting - -Is introspect not working? 
Confirm that the environment variables defined in the `.env` file are correct. - -::: - -## What did `connector introspect` do? - -The command introspected your data source to create a JSON configuration file. - -In your terminal window, the CLI started your connector using its `compose.yaml` and then fetched the schema of your -ClickHouse database. - -If you look at the `configuration.json` for your connector, you'll see metadata describing your ClickHouse schema in a -format which the connector specifies. - -Additionally, the CLI updated the `DataConnectorLink` object with the latest metadata to interact with the connector. - -:::tip Initialize a Git repository - -At this point, we recommend initializing a Git repository. This gives you a fallback point as you begin to iterate on -your project. - -::: - -## Step 3. Track your tables - -Tables from ClickHouse are represented as [models](/supergraph-modeling/models.mdx) in your API. The next commands we'll -run will take each table in your database and create an `hml` file for it. These files will then be used by the Hasura -engine to generate your API. - -```bash title="Run the following to create your models and relationships:" -ddn model add my_clickhouse "*" -ddn relationship add my_clickhouse "*" -``` - -If you look in the `metadata` directory for your subgraph, you'll see named files for each resource. These will also -contain relationships based on foreign keys, allowing you to make nested queries in your GraphQL API. - -## Step 4. Create a new build and restart the services - -To reflect the changes in your API, create a new build. - -```bash title= "Run the following:" -ddn supergraph build local -``` - -And, if your services are not already running, start them. - -```bash title="Run the following:" -ddn run docker-start -``` - -You should see your models available in your API by opening your console using: - -```bash title="Run the following:" -ddn console --local -``` - -## Next steps - -With our data source connected and all of our models tracked, we can move on to -[add custom authorization rules](/getting-started/build/05-add-permissions.mdx) using permissions, -[incorporate custom business logic](/getting-started/build/06-add-business-logic.mdx), or -[create relationships](/getting-started/build/07-create-a-relationship.mdx) across data sources! diff --git a/docs/getting-started/build/03-connect-to-data/_databaseDocs/_graphql/_01-connect-a-source.mdx b/docs/getting-started/build/03-connect-to-data/_databaseDocs/_graphql/_01-connect-a-source.mdx deleted file mode 100644 index f25a58a62..000000000 --- a/docs/getting-started/build/03-connect-to-data/_databaseDocs/_graphql/_01-connect-a-source.mdx +++ /dev/null @@ -1,254 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? - -You can easily and quickly connect any GraphQL API to your supergraph. - -To do this, we use the Hasura [GraphQL Native Data Connector](/connectors/external-apis/graphql.mdx) to facilitate the -connection. - - - -## Step 1. 
Initialize the GraphQL connector - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) - -::: - -To initialize the GraphQL connector, with the appropriate subgraph set in context, run the following in your terminal: - -```bash title="Run the following command:" -ddn connector init my_graphql -i -``` - -- Select `hasura/graphql` from the list of connectors. -- Choose a port (press enter to accept the default recommended by the CLI). -- In this example, we've called the connector `my_graphql`. You can name it something descriptive. - -:::tip Best practices - -Importantly, a data connector can only connect to one data source. - -The project will be kept organized with each data connector's configuration located in a relevant subgraph directory. In -this example the CLI will create a `my_subgraph/connector/my_graphql` directory if it doesn't exist. You can also change -this directory by passing a `--dir` flag to the CLI. - -We recommend that the name of the connector and the directory in which the configuration is stored, `my_graphql` in this -example, should match for convenience and clarity sake for this tutorial, but it can be anything you want. - -::: - -### What did `connector init` do? - -In the `my_subgraph/connector/my_graphql` directory which we specified in the command, the CLI created: - -- A `connector.yaml` file which contains the local configuration for the connector. -- A `.hasura-connector` folder which contains the connector definition used to build and run it. -- A `compose.yaml` a file to run the GraphQL data connector locally in Docker. -- A placeholder `.ddnignore` file to prevent unnecessary files from being included in the build. -- A `configuration.json` that we'll update with values like our connection string(s) and any relevant headers. -- A `configuration.schema.json` that the connector will use generate your API. - -In the `my_subgraph/metadata` directory, the CLI created: - -- A `my_graphql.hml` file which contains the [`DataConnectorLink`](/supergraph-modeling/data-connector-links.mdx) - metadata object which describes how the supergraph can interact with the connector. - -Right now, the CLI has only scaffolded out configuration files for the data connector. Our connector still knows nothing -about the GraphQL schema. That's coming up in the next steps. - -## Step 2. Configure the roles - -### Step 2.1 Configure introspection - -Under the `introspection` section, add the URL for your GraphQL endpoint. - -```json title="Inside of my_subgraph/connector/my_graphql/configuration.json:" -{ - ... - "introspection": { - "endpoint": { - //highlight-start - "value": "https://my-graphql-endpoint/graphql" - //highlight-end - } - } -} -``` - -:::tip Connecting to an Existing Hasura v2 Instance - -When connecting to an existing Hasura v2 instance, it is important to update the configuration introspection with the -appropriate headers: `x-hasura-admin-secret` and `x-hasura-role`. These headers are necessary for executing -introspection requests successfully. - -You may want to set a pre-defined value for `x-hasura-role` to ensure that introspection occurs under a specific role. -This can be particularly useful if you want requests to be executed without requiring forwarded authentication -credentials. 
- -**Caution:** If no explicit role is set, the admin role will be used to fetch the schema. This may not be suitable for -your application. - -There are some other considerations for connecting to a v2 instance. For more details, please visit -[the connector's docs](/connectors/external-apis/graphql.mdx). - -::: - -### Step 2.2 Configure execution - -Under the `execution` section, add the URL for your GraphQL endpoint. - -```json title="Inside of my_subgraph/connector/my_graphql/configuration.json:" -{ - ... - "execution": { - "endpoint": { - //highlight-start - "value": "https://my-graphql-endpoint/graphql" - //highlight-end - } - } -} -``` - -### Step 2.3 Configure Request Headers - -If your endpoint requires [authorization or another header](/connectors/external-apis/graphql/#connectionconfiguration), then create that variable and reference it as a header. -If your endpoint requires no extra headers, then skip this step. - -Create a .env file with your new variable, e.g: -```bash title="Inside of .env" -APP_X_HASURA_ADMIN_SECRET=your_secret_key_without_quotations -``` - -Map it in your subgraph.yaml: -```yaml title="Inside of my_subgraph/subgraph.yaml" -... -envMapping: - //highlight-start - APP_X_HASURA_ADMIN_SECRET: - fromEnv: APP_X_HASURA_ADMIN_SECRET - //highlight-end -``` - -Reference it in your connector's compose.yaml: -```yaml title="Inside of my_subgraph/my_connector/compose.yaml" -... -environment: - //highlight-start - X_HASURA_ADMIN_SECRET: $APP_X_HASURA_ADMIN_SECRET - //highlight-end -``` - -And also in your connector's connector.yaml: -```yaml title="Inside of my_subgraph/my_connector/connector.yaml -... -envMapping: - //highlight-start - X_HASURA_ADMIN_SECRET: - fromEnv: APP_X_HASURA_ADMIN_SECRET - //highlight-end -``` - -And finally in your connector's configuration.json: -```json title="Inside of my_subgraph/my_connector/configuration.json" -{ - "$schema": "configuration.schema.json", - "introspection": { - "endpoint": { - "valueFromEnv": "GRAPHQL_ENDPOINT" - }, - //highlight-start - "headers": { - "X-Hasura-Admin-Secret": { - "valueFromEnv": "X_HASURA_ADMIN_SECRET" - } - } - //highlight-end - }, - "execution": { - "endpoint": { - "valueFromEnv": "GRAPHQL_ENDPOINT" - }, - //highlight-start - "headers": { - "X-Hasura-Admin-Secret": { - "valueFromEnv": "X_HASURA_ADMIN_SECRET" - } - } - //highlight-end - } -} -``` - -## Step 3. Introspect your GraphQL API - -This will start the connector using the `compose.yaml` in its directory, and then introspect your GraphQL API and fetch -all the required information for your connector. - -```bash -ddn connector introspect my_graphql -``` - -:::tip Remove placeholder environment variables - -If your API doesn't have authorization, or if you haven't set `GRAPHQL_ENDPOINT_AUTHORIZATION`, remove these values from -your config. Otherwise, introspection will fail. - -::: - -## What did `connector introspect` do? - -The CLI will introspect the GraphQL schema and create a `schema.graphql` file in the `my_subgraph/connector/my_graphql` -directory. This schema is a representation of your external GraphQL API. - -Additionally, the CLI updated the `DataConnectorLink` object with the latest metadata to interact with the connector. - -:::tip o11y via OpenTelemetry - -Yes! Connectors ship with OTEL-enabled tracing available, out of the box πŸŽ‰ - -::: - -## Step 4. Track your types - -Types exposed by your GraphQL API are represented as [Commands](/supergraph-modeling/commands.mdx) in your API. 
The next -command we'll run will take each type in your GraphQL schema and create an `hml` file for it. These files will then be -used by the Hasura engine to generate your API. - -```bash title="Run the following to create your commands:" -ddn command add my_graphql "*" -``` - -If you look in the `metadata` directory for your subgraph, you'll see named files for each function. - -## Step 5. Create a new build and restart the services - -To reflect the changes in your API, create a new build. - -```bash title= "Run the following:" -ddn supergraph build local -``` - -And, if your services are not already running, start them. - -```bash title="Run the following:" -ddn run docker-start -``` - -You should see your commands available in your API by opening your console using: - -```bash title="Run the following:" -ddn console --local -``` - -## Next steps - -With our data source connected and all of our models tracked, we can move on to -[add custom authorization rules](/getting-started/build/05-add-permissions.mdx) using permissions, -[incorporate custom business logic](/getting-started/build/06-add-business-logic.mdx), or -[create relationships](/getting-started/build/07-create-a-relationship.mdx) across data sources! diff --git a/docs/getting-started/build/03-connect-to-data/_databaseDocs/_mongoDB/_01-connect-a-source.mdx b/docs/getting-started/build/03-connect-to-data/_databaseDocs/_mongoDB/_01-connect-a-source.mdx deleted file mode 100644 index 64ff161ce..000000000 --- a/docs/getting-started/build/03-connect-to-data/_databaseDocs/_mongoDB/_01-connect-a-source.mdx +++ /dev/null @@ -1,190 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? - -We want to connect our [MongoDB](https://www.mongodb.com/) database to our API. To do this, we use the Hasura MongoDB -data connector to facilitate the connection and then introspect the database to generate JSON which the Hasura CLI will -then use to create metadata which can then define your API. - - - -## Step 1. Initialize the MongoDB connector - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) - -::: - -:::tip Need a connection string? - -Feel free to use this read-only MongoDB connection string for testing purposes: - -```text -mongodb+srv://read_only_user:readonlyuser@v3-docs-sample-app.vh2tp.mongodb.net/sample_mflix?retryWrites=true&w=majority&appName=v3-docs-sample-app -``` - -::: - -To initialize the MongoDB connector, with the appropriate subgraph set in context, run the following command in your -terminal: - -```bash title="Run the following command:" -ddn connector init my_mongo -i -``` - -- Select `hasura/mongodb` from the list of connectors. -- Choose a port (press enter to accept the default recommended by the CLI). -- Enter your connection string -- In this example, we've called the connector `my_mongo`. You can name it something descriptive. - -:::tip Best practices - -Importantly, a data connector can only connect to one data source. - -The project will be kept organized with each data connector's configuration located in a relevant subgraph directory. In -this example the CLI will create a `my_subgraph/connector/my_mongo` directory if it doesn't exist. You can also change -this directory by passing a `--dir` flag to the CLI. 
- -We recommend that the name of the connector and the directory in which the configuration is stored, `my_mongo` in this -example, should match for convenience and clarity sake. - -::: - -### What did `connector init` do? - -In the `my_subgraph/connector/my_mongo` directory which we specified in the command, the CLI created: - -- A `connector.yaml` file which contains the local configuration for the connector. -- A `.hasura-connector` folder which contains the connector definition used to build and run it. -- A `compose.yaml` a file to run the MongoDB data connector locally in Docker. -- A placeholder `.ddnignore` file to prevent unnecessary files from being included in the build. - -In the `my_subgraph/metadata` directory, the CLI created: - -- A `my_mongo.hml` file which contains the [`DataConnectorLink`](/supergraph-modeling/data-connector-links.mdx) metadata - object which describes how the supergraph can interact with the connector. - -Right now, the CLI has only scaffolded out configuration files for the data connector. Our connector still knows nothing -about the MongoDB database or the data it contains. That's coming up in the next steps. - -You can use a local MongoDB database, or a cloud-hosted one like MongoDB Atlas. Check out -[this page](https://www.mongodb.com/docs/manual/installation/) for a list of options for running a MongoDB database if -you don't already have one. If you already have one you can connect to, you can go ahead and do that. Hasura DDN will -not modify your database in any way, so you can use an existing database without any worries. - -:::tip Docker networking Inside a Docker container - -`local.hasura.dev` is set to the `host-gateway` alias in the `extra_hosts` option. With this option set, -`local.hasura.dev` resolves to the host machine's gateway IP address from _inside_ the container. This allows various -containers, such as the GraphQL Engine and data connectors, to communicate with each other and out the host machine. - -::: - -:::info Environment-specific caveats - -**Local Mongo** - -If you're using a local MongoDB database β€” such as through [Docker](https://hub.docker.com/_/mongodb) β€” you can connect -to it directly from the data connector. However, if you deploy your supergraph to Hasura DDN the cloud-hosted version of -your data connector won't be able to find your database. So tunneling that connection from the start with a tool like -[ngrok](https://ngrok.com/) is a good idea. - -**Cloud-hosted MongoDB** - -Alternatively, if you have a cloud-hosted database, perhaps with -[MongoDB Atlas](https://www.mongodb.com/products/platform/atlas-database) as Hasura DDN will need to reach your -database, ensure you've allowlisted `0.0.0.0/0` (for now) so that DDN is able to reach it. To learn how to deploy a -MongoDB Atlas cluster, see the [official documentation](https://www.mongodb.com/docs/atlas/getting-started/). - -::: - -## Step 2. Introspect your database - -With the connector configured, we can now use the CLI to introspect our MongoDB database. This step will create a data -connector specific configuration files, and generate the necessary Hasura metadata which describes our API by creating -files for each collection in our database. These collections will be tracked as -[Models](/supergraph-modeling/models.mdx). - -```bash title="Run the following command:" -ddn connector introspect my_mongo -``` - -:::tip Troubleshooting - -Is introspect not working? Confirm that the environment variables defined in the `.env` file are correct. 
- -::: - -## What did `connector introspect` do? - -The command introspected your data source to create configuration files. - -In your terminal window, the CLI started your connector using its `compose.yaml` and then fetched the schema of your -MongoDB database. - -Additionally, the CLI updated the `DataConnectorLink` object with the latest metadata to interact with the connector. - -These include a `configuration.json` file and a new `schema` directory with a definition of all collections found in -MongoDB in a JSON [NDC-compliant](https://github.com/hasura/ndc-spec) format which the connector specifies. - -Eg: - -```text -. -β”œβ”€β”€ comments.json -β”œβ”€β”€ embedded_movies.json -β”œβ”€β”€ movies.json -β”œβ”€β”€ sessions.json -β”œβ”€β”€ theaters.json -└── users.json -``` - -:::tip o11y via OpenTelemetry - -Yes! Connectors ship with OTEL-enabled tracing available, out of the box πŸŽ‰ - -::: - -## Step 3. Track your collections - -Collections from MongoDB are represented as [Models](/supergraph-modeling/models.mdx) in your API. The next commands -we'll run will take each collection in your database and create an `hml` file for it. These files will then be used by -the Hasura engine to generate your API. - -```bash title="Run the following to create your models and relationships:" -ddn model add my_mongo "*" -ddn relationship add my_mongo "*" -``` - -If you look in the `metadata` directory for your subgraph, you'll see named files for each resource. These will also -contain relationships based on foreign keys, allowing you to make nested queries in your GraphQL API. - -## Step 4. Create a new build and restart the services - -To reflect the changes in your API, create a new build. - -```bash title= "Run the following:" -ddn supergraph build local -``` - -And, if your services are not already running, start them. - -```bash title="Run the following:" -ddn run docker-start -``` - -You should see your models available in your API by opening your console using: - -```bash title="Run the following:" -ddn console --local -``` - -## Next steps - -With our data source connected and all of our models tracked, we can move on to -[add custom authorization rules](/getting-started/build/05-add-permissions.mdx) using permissions, -[incorporate custom business logic](/getting-started/build/06-add-business-logic.mdx), or -[create relationships](/getting-started/build/07-create-a-relationship.mdx) across data sources! diff --git a/docs/getting-started/build/03-connect-to-data/_databaseDocs/_openAPI/_01-connect-a-source.mdx b/docs/getting-started/build/03-connect-to-data/_databaseDocs/_openAPI/_01-connect-a-source.mdx deleted file mode 100644 index ade788c79..000000000 --- a/docs/getting-started/build/03-connect-to-data/_databaseDocs/_openAPI/_01-connect-a-source.mdx +++ /dev/null @@ -1,202 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? - -You can easily and quickly connect any API defined in the [OpenAPI](https://www.openapis.org/) spec format to your -Supergraph. - -To do this, we use the Hasura [OpenAPI Lambda data connector](https://github.com/hasura/ndc-open-api-lambda) to -facilitate the connection. Then, with the DDN CLI, we introspect the OpenAPI document to generate Typescript functions, -and then use those to create DDN metadata which defines our API. - -In addition, once we've generated functions from our OpenAPI document, we can modify them or add new ones to implement -additional business logic. - - - -## Step 1. 
Initialize the OpenAPI connector
-
-:::tip Required
-
-- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx)
-- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx)
-- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx)
-
-:::
-
-To initialize the OpenAPI connector, run the following command in your terminal:
-
-```bash title="Run the following command:"
-ddn connector init my_openapi -i
-```
-
-- Select `hasura/openapi` from the list of connectors.
-- Choose a port (press enter to accept the default recommended by the CLI).
-- Enter your connection details. For more information, see Step 2 below.
-- In this example, we've called the connector `my_openapi`. You can name it something descriptive.
-
-:::tip Best practices
-
-Importantly, a data connector can only connect to one data source.
-
-The project will be kept organized with each data connector's configuration located in a relevant subgraph directory.
-In this example, the CLI will create a `my_subgraph/connector/my_openapi` directory if it doesn't exist. You can also
-change this directory by passing a `--dir` flag to the CLI.
-
-We recommend that the name of the connector and the directory in which the configuration is stored, `my_openapi` in
-this example, should match for convenience and clarity's sake in this tutorial, but it can be anything you want.
-
-:::
-
-### What did `connector init` do?
-
-In the `my_subgraph/connector/my_openapi` directory which we specified in the command, the CLI created:
-
-- A `connector.yaml` file which contains the local configuration for the connector.
-- A `.hasura-connector` folder which contains the connector definition used to build and run it.
-- A `compose.yaml` file to run the OpenAPI data connector locally in Docker, which includes the specified port.
-- A placeholder `.ddnignore` file to prevent unnecessary files from being included in the build.
-
-In the `my_subgraph/metadata` directory, the CLI created:
-
-- A `my_openapi.hml` file which contains the [`DataConnectorLink`](/supergraph-modeling/data-connector-links.mdx)
-  metadata object which describes how the supergraph can interact with the connector.
-
-Right now, the CLI has only scaffolded out configuration files for the data connector. Our connector still knows
-nothing about the OpenAPI document. That's coming up in the next steps.
-
-## Step 2. Specify the OpenAPI document location
-
-In the root `.env` file, we can add the `MY_OPENAPI_NDC_OAS_DOCUMENT_URI` environment variable, which should point to
-the OpenAPI document. If you're using a file instead of an HTTP link, please ensure that it is named `swagger.json` and
-is present in the root directory of the volume which is mounted to `/etc/connector` (for this tutorial, the
-`swagger.json` file should be present at `my_subgraph/connector/my_openapi/`).
-
-```text
-MY_OPENAPI_NDC_OAS_DOCUMENT_URI=
-```
-
-This OpenAPI document is only used in the next step, `connector introspect`, to generate the TypeScript files.
-
-:::tip Need a test OpenAPI document URI?
-
-Feel free to use the pet store OpenAPI document available at:
-
-```text
-https://petstore3.swagger.io/api/v3/openapi.json
-```
-
-:::
-
-### Specify the OpenAPI base URL
-
-You can also add the optional `NDC_OAS_BASE_URL` environment variable to the `.env.local` file. This should point to
-the **base URL** of the API.
 This will then be used as a string
-literal in the `functions.ts` file, which will be generated in the next step, so that our functions can make API
-calls. E.g.:
-
-```text
-NDC_OAS_BASE_URL=https://petstore3.swagger.io/api/v3
-```
-
-_Alternatively_, you can manually edit this value at the top of the `functions.ts` file later. E.g.:
-
-```typescript
-const api = new Api({
-  baseUrl: "https://petstore3.swagger.io/api/v3",
-});
-```
-
-You can also implement additional logic in the `functions.ts` file to handle different endpoints or headers.
-
-### Additional environment variables
-
-The OpenAPI connector can also be configured with the following **extra optional environment variables**:
-
-- `NDC_OAS_FILE_OVERWRITE` (optional): A Boolean flag to allow previously generated files to be overwritten. Defaults
-  to `false`.
-- `HASURA_PLUGIN_LOG_LEVEL` (optional): The log level. Possible values: `trace`, `debug`, `info`, `warn`, `error`,
-  `fatal`, `panic`. Defaults to `info`.
-- `NDC_OAS_LAMBDA_PRETTY_LOGS` (optional): A Boolean flag to print human-readable logs instead of JSON. Defaults to
-  `false`.
-
-These environment variables are already referenced in the scaffolded
-`my_subgraph/connector/my_openapi/.hasura-connector/connector-metadata.yaml` file.
-
-## Step 3. Introspect your OpenAPI document
-
-This will introspect your OpenAPI document and generate the files required to run the TypeScript project.
-
-```bash
-ddn connector introspect my_openapi
-```
-
-## What did `connector introspect` do?
-
-In your terminal window, the CLI started your connector using its `compose.yaml` and then fetched the schema of your
-OpenAPI spec.
-
-The CLI will introspect the OpenAPI document and create an `api.ts` file, a `functions.ts` file, and other supporting
-files in the `my_subgraph/connector/my_openapi` directory.
-
-- The `api.ts` file contains the Data Types and API calls from the OpenAPI document.
-- The `functions.ts` file contains functions that wrap API calls. You can modify this `functions.ts` file to introduce
-  extra business logic.
-
-Additionally, the CLI updated the `DataConnectorLink` object with the latest metadata to interact with the connector.
-
-:::tip o11y via OpenTelemetry
-
-Yes! Connectors ship with OTEL-enabled tracing available, out of the box 🎉
-
-:::
-
-:::note Regenerating the functions file
-
-The introspection process checks for changes in the `api.ts` file to determine whether it needs to regenerate files. If
-you make changes to the `api.ts` file, you can run the `connector introspect` command again to regenerate the files.
-They will only be overwritten if the `NDC_OAS_FILE_OVERWRITE` environment variable is set to `true`.
-
-:::
-
-## Step 4. Track your functions
-
-Functions generated from the OpenAPI spec are represented as [Commands](/supergraph-modeling/commands.mdx) in your API.
-The next command we'll run will take each generated function and create an `hml` file for it. These files will then be
-used by the Hasura engine to generate your API.
-
-```bash title="Run the following to create your commands:"
-ddn command add my_openapi "*"
-```
-
-If you look in the `metadata` directory for your subgraph, you'll see named files for each function.
-
-## Step 5. Create a new build and restart the services
-
-To reflect the changes in your API, create a new build.
-
-```bash title="Run the following:"
-ddn supergraph build local
-```
-
-And, if your services are not already running, start them.
-
-```bash title="Run the following:"
-ddn run docker-start
-```
-
-You should see your commands available in your API by opening your console using:
-
-```bash title="Run the following:"
-ddn console --local
-```
-
-## Next steps
-
-With our data source connected and all of our models tracked, we can move on to
-[add custom authorization rules](/getting-started/build/05-add-permissions.mdx) using permissions,
-[incorporate custom business logic](/getting-started/build/06-add-business-logic.mdx), or
-[create relationships](/getting-started/build/07-create-a-relationship.mdx) across data sources!
diff --git a/docs/getting-started/build/03-connect-to-data/_databaseDocs/_postgreSQL/_01-connect-a-source.mdx b/docs/getting-started/build/03-connect-to-data/_databaseDocs/_postgreSQL/_01-connect-a-source.mdx
deleted file mode 100644
index 7afa4abbe..000000000
--- a/docs/getting-started/build/03-connect-to-data/_databaseDocs/_postgreSQL/_01-connect-a-source.mdx
+++ /dev/null
@@ -1,188 +0,0 @@
-import Thumbnail from "@site/src/components/Thumbnail";
-
-## What's about to happen?
-
-We want to connect our [PostgreSQL](https://www.postgresql.org/) database to our API. To do this, we use the Hasura
-PostgreSQL data connector to facilitate the connection and then introspect the database to generate JSON, which the
-Hasura CLI then uses to create the metadata that defines your API.
-
-
-
-## Step 1. Initialize the PostgreSQL connector
-
-:::tip Required
-
-- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx)
-- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx)
-- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx)
-
-:::
-
-To initialize the PostgreSQL connector, with the appropriate subgraph set in context, run the following command in your
-terminal:
-
-```bash title="Run the following command:"
-ddn connector init my_pg -i
-```
-
-- Select `hasura/postgres` from the list of connectors.
-- Choose a port (press enter to accept the default recommended by the CLI).
-- Enter your connection string (press enter to accept the sample, read-only database).
-- In this example, we've called the connector `my_pg`. You can name it something descriptive.
-
-:::tip Best practices
-
-Importantly, a data connector can only connect to one data source.
-
-The project will be kept organized with each data connector's configuration located in a relevant subgraph directory.
-In this example, the CLI will create a `my_subgraph/connector/my_pg` directory if it doesn't exist.
-
-We recommend that the name of the connector and the directory in which the configuration is stored, `my_pg` in this
-example, should match for convenience and clarity's sake in this tutorial, but it can be anything you want.
-
-:::
-
-### What did `connector init` do?
-
-In the `my_subgraph/connector/my_pg` directory which we specified in the command, the CLI created:
-
-- A `connector.yaml` file which contains the local configuration for the connector.
-- A `.hasura-connector` folder which contains the connector definition used to build and run it.
-- A `compose.yaml` file to run the PostgreSQL data connector locally in Docker.
-- A placeholder `.ddnignore` file to prevent unnecessary files from being included in the build.
-- A `configuration.json` file which contains configuration for the data connector itself.
-
-- A `schema.json` file which is the JSON schema that `configuration.json` follows. This is to enable autocomplete in VS
-  Code.
-
-In the `my_subgraph/metadata` directory, the CLI created:
-
-- A `my_pg.hml` file which contains the [`DataConnectorLink`](/supergraph-modeling/data-connector-links.mdx) metadata
-  object which describes how the supergraph can interact with the connector.
-
-Right now, the CLI has only scaffolded out configuration files for the data connector. Our connector still knows
-nothing about the PostgreSQL database or the data it contains. That's coming up in the next steps.
-
-## Step 2. Update the connection URI
-
-The PostgreSQL connector ships with a sample read-only database connection as the default `CONNECTION_URI`.
-
-In the root of our project, our `.env` was updated by the CLI to include a new set of values, including the connection
-URI. **If you wish to override the default demo connection, you can update the key-value pair of `CONNECTION_URI` with
-your custom connection string in this file.**
-
-```env title="The .env in the root of our project:"
-APP_MY_PG_AUTHORIZATION_HEADER="Bearer "
-#highlight-start
-APP_MY_PG_CONNECTION_URI="postgresql://read_only_user:readonlyuser@35.236.11.122:5432/v3-docs-sample-app"
-#highlight-end
-APP_MY_PG_HASURA_SERVICE_TOKEN_SECRET=""
-APP_MY_PG_OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="http://local.hasura.dev:4317"
-APP_MY_PG_OTEL_SERVICE_NAME="app_my_pg"
-APP_MY_PG_READ_URL="http://local.hasura.dev:8082"
-APP_MY_PG_WRITE_URL="http://local.hasura.dev:8082"
-```
-
-You can use a local PostgreSQL database or a cloud-hosted option. Hasura DDN will not modify your database in any way,
-so you can use an existing database without any worries.
-
-**Don't have a database at hand?** Check [this guide](/connectors/postgresql/local-postgres.mdx) to set up a local
-Postgres database using Docker.
-
-:::tip Docker networking
-
-Inside a Docker container, `local.hasura.dev` is set to the `host-gateway` alias in the `extra_hosts` option. With this
-option set, `local.hasura.dev` resolves to the host machine's gateway IP address from _inside_ the container. This
-allows various containers, such as the GraphQL Engine and data connectors, to communicate with each other and with the
-host machine.
-
-:::
-
-:::info Environment-specific caveats
-
-**Local PostgreSQL**
-
-If you're using a local PostgreSQL database — such as through [Docker](https://hub.docker.com/_/postgres) — you can
-connect to it directly from the data connector running locally. However, if you deploy your connector to Hasura DDN,
-the cloud-hosted version of your data connector won't be able to find your local database. You'll need to use a tool
-like [ngrok](https://ngrok.com/) to provide a tunnel to access your database from the cloud. This will expose the port,
-most likely `5432`, on which the database is running and allow Hasura DDN to connect to it.
-
-**Cloud-hosted PostgreSQL**
-
-Alternatively, if you have a cloud-hosted database, ensure you've allowlisted `0.0.0.0/0` (for now) so that Hasura DDN
-is able to reach it.
-
-:::
-
-## Step 3. Introspect your database
-
-With the connector configured, we can now use the CLI to introspect our PostgreSQL database. This step will create a
-data connector-specific configuration file and generate the necessary Hasura metadata which describes our API by
-creating files for each table in our database. These tables will be tracked as
-[Models](/supergraph-modeling/models.mdx).
-
-```bash title="Run the following command:"
-ddn connector introspect my_pg
-```
-
-## What did `connector introspect` do?
-
-The command introspected your data source to create a JSON configuration file.
-
-In your terminal window, the CLI started your connector using its `compose.yaml` and then fetched the schema of your
-PostgreSQL database.
-
-If you look at the `configuration.json` for your connector, you'll see metadata describing your PostgreSQL schema in a
-format which the connector specifies.
-
-Additionally, the CLI updated the `DataConnectorLink` object with the latest metadata to interact with the connector.
-
-:::tip Initialize a Git repository
-
-At this point, we recommend initializing a Git repository. This gives you a fallback point as you begin to iterate on
-your project.
-
-:::
-
-## Step 4. Track your tables
-
-Tables from PostgreSQL are represented as [models](/supergraph-modeling/models.mdx) in your API. The next commands we'll
-run will take each table or view in your database and create an `hml` file for it. These files will then be used by the
-Hasura engine to generate your API.
-
-```bash title="Run the following to create your models and relationships:"
-ddn model add my_pg "*"
-ddn relationship add my_pg "*"
-```
-
-If you look in the `metadata` directory for your subgraph, you'll see named files for each resource. These will also
-contain relationships based on foreign keys, allowing you to make nested queries in your GraphQL API.
-
-## Step 5. Create a new build and restart the services
-
-To reflect the changes in your API, create a new build.
-
-```bash title="Run the following:"
-ddn supergraph build local
-```
-
-And, if your services are not already running, start them.
-
-```bash title="Run the following:"
-ddn run docker-start
-```
-
-You should see your models available in your API by opening your console using:
-
-```bash title="Run the following:"
-ddn console --local
-```
-
-## Next steps
-
-With our data source connected and all of our models tracked, we can move on to
-[add custom authorization rules](/getting-started/build/05-add-permissions.mdx) using permissions,
-[incorporate custom business logic](/getting-started/build/06-add-business-logic.mdx), or
-[create relationships](/getting-started/build/07-create-a-relationship.mdx) across data sources!
-
-Additionally, if you're curious how to update your API after the underlying data source schema changes, check out the
-next page on [updating data source metadata](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx).
diff --git a/docs/getting-started/build/03-connect-to-data/index.mdx b/docs/getting-started/build/03-connect-to-data/index.mdx
deleted file mode 100644
index 4a1b59688..000000000
--- a/docs/getting-started/build/03-connect-to-data/index.mdx
+++ /dev/null
@@ -1,35 +0,0 @@
----
-sidebar_position: 1
-sidebar_label: Connect to Data
-description: "Learn how to get started with Hasura DDN and your GraphQL API."
-keywords:
-  - hasura ddn
-  - graphql api
-  - quickstart
-  - getting started
-  - guide
----
-
-import Thumbnail from "@site/src/components/Thumbnail";
-
-# Connect to Data
-
-{/* */}
-
-You can connect any kind of data to Hasura DDN.
-
-The connection is done through a [**data connector**](/connectors/overview.mdx), a piece of software independent of the
-Hasura Engine which facilitates the connection.
-
-You can use an "off-the-shelf" data connector built by Hasura, use one built by the community, or build your own.
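-
-To make that concrete, here's a minimal sketch of pulling an "off-the-shelf" connector into a project with the CLI
-(the connector name `my_connector` is just a placeholder for this example):
-
-```bash title="Run the following command:"
-# launches an interactive picker that lists the available connectors
-ddn connector init my_connector -i
-```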
- -Data connectors can be used directly via the [Hasura Connector Hub](https://hasura.io/connectors/#connectors-list) using -the CLI. The Connector Hub is a managed list of connectors that you can use to connect to your data sources. - -A data connector encapsulates a data source via a web service by implementing the protocol in the -[NDC specification](https://hasura.github.io/ndc-spec/specification/index.html). - -You can connect cloud or local **data sources** via data connectors to Hasura DDN. - -You can use the CLI to [add a data connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) for -your data source and generate the Hasura metadata. diff --git a/docs/getting-started/build/04-build-your-api.mdx b/docs/getting-started/build/04-build-your-api.mdx deleted file mode 100644 index b86ef718e..000000000 --- a/docs/getting-started/build/04-build-your-api.mdx +++ /dev/null @@ -1,126 +0,0 @@ ---- -sidebar_position: 5 -sidebar_label: Build your API & make queries -description: "Learn how to get started with Hasura DDN and your GraphQL API." -keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide ---- - -import Thumbnail from "@site/src/components/Thumbnail"; - -# Build your Supergraph & Make Queries - -## What's about to happen? - -After [connecting a data source](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) and exposing models, -you can create a new local build of your supergraph. - -A build is a compiled, immutable state of your metadata which the Hasura Engine uses to run your API. - -When you execute the command below, the CLI will create a new build for your supergraph. You'll then be able to execute -a GraphQL query on your API using any models you've added. - - - -## Steps - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- At least one [subgraph](/getting-started/build/02-init-subgraph.mdx) -- At least one [data connector running](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- [Models and/or Commands added](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx) to your - subgraph - -::: - -### Step 1. Create a supergraph build - -```bash title="Run:" -ddn supergraph build local -``` - -This will create a build of your supergraph located in the `/engine/build` directory. - -### Step 2. Start your engines! - -If you are already running this command, first stop the command with `CTRL+C`. Then run the command to start the connector and engine services with the updated configurations. - -```bash title="Run:" -ddn run docker-start -``` - -:::tip Port conflicts? - -Stop any previously running `ddn run docker-start` commands in your terminal. Existing running servers can conflict with the ports needed. In particular, engine always starts on port 3000. - -::: - -### Step 3. Launch the Hasura console - -You can check out your local API by launching the Hasura console using the CLI. - -```bash title="Run:" -ddn console --local -``` - -:::tip Privacy settings in some browsers - -Your browser settings or privacy tools may prevent the Console from accessing your local Hasura instance. This could be -due to features designed to protect your privacy and security. Should you encounter one of these issues, we recommend -disabling these settings for the `console.hasura.io` domain. 
- -[Chrome](https://www.google.com/chrome/) and [Firefox](https://www.mozilla.org/en-US/firefox/new/) are the recommended -browsers for the best experience with the Hasura Console including for local development. - -::: - -### Step 4. Write your first query - -Use the GraphiQL explorer to either write a query or construct it using the menu on the left-hand side of the console. -When you're ready, hit the run button to execute your query. - -```graphql title=" For example, if we have a Carts model, we can run the following query:" -query MyFirstQuery { - carts { - id - isComplete - userId - } -} -``` - -The above query will return a response that looks like this: - -![Simple query with carts](/img/get-started/beta/console_simple-carts-query.png) - -:::info Query names - -In GraphQL, you can provide a name for your query, which in this example is `MyFirstQuery`. This name is customizable by -you at query time and just helps with logging and debugging. - -::: - -## What did this do? - -When you executed the command above, the CLI used the Hasura metadata in your directory - generated based on your data -source(s) β€” to create a local build of your supergraph. This local build is immutable and can be used to test the -changes to your API before either making more changes or, eventually, creating a build on Hasura DDN. - -Next, we recommend learning how easy it is to [add authorization rules](/getting-started/build/05-add-permissions.mdx), -which limit a user's access to data, using permissions. - -## Next steps - -Now that you have a build of your supergraph you can do a lot more with it. Here are some suggestions to allow you to -jump around the Getting Started section according to your interests: - -- [Deploy to cloud](/getting-started/deployment/index.mdx) to make your API available to the world -- [Add permissions](/getting-started/build/05-add-permissions.mdx) to your models -- [Create relationships](/getting-started/build/07-create-a-relationship.mdx) between models -- [Mutate your data](/getting-started/build/08-mutate-data.mdx) with Hasura DDN diff --git a/docs/getting-started/build/05-add-permissions.mdx b/docs/getting-started/build/05-add-permissions.mdx deleted file mode 100644 index 50f9793b0..000000000 --- a/docs/getting-started/build/05-add-permissions.mdx +++ /dev/null @@ -1,337 +0,0 @@ ---- -sidebar_position: 6 -sidebar_label: Add authorization -description: "Learn how to get started with Hasura DDN and your GraphQL API." -keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide ---- - -import Thumbnail from "@site/src/components/Thumbnail"; - -# Add Authorization - -## What's about to happen? - -If you've already created and tested your supergraph, you'll have seen that queries are executed by default without -authorization and your API is open to the public. - -However, you can easily add authorization rules β€” what we term permissions β€” by declaring them in metadata. This allows -you to control what data can be accessed and what fields can be returned by your API based on the user role, or any -other value in session variables. - -In the steps below, we'll add -[`modelPermissions`](/supergraph-modeling/permissions.mdx#modelpermissions-modelpermissions) to filter the data that can -be accessed by a role and [`typePermissions`](/supergraph-modeling/permissions.mdx#typepermissions-typepermissions) to -control what fields are returned. 
- -We will then test these permissions by creating a JWT token with the role of `user` and a specific user `id` value, and -pass this token in the `Authorization` header of our request. - -:::tip You can use the Hasura VS Code extension! - -You can use the Hasura VS Code extension to help you author any metadata objects or updates, including -`modelPermissions` and `typePermissions`. You can activate its autocomplete abilities using `CTRL+SPACEBAR` inside of -any `hml` file. - -::: - - - -## Steps - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- At least one [subgraph](/getting-started/build/02-init-subgraph.mdx) -- At least one [data connector running](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) -- [Models and/or Commands added](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx) to your - subgraph - -::: - -### Step 1. Create a modelPermission - -By default, a role of `admin` exists for your API and can access all models. To create a new role, such as `user`, -simply add the role to the list of `permissions` for a model and set up your access control rules. In the example below, -we'll allow users with the role of `user` to access only their own rows from a `Users` model by checking for a header -value matching their `id`: - -```yaml title="For example, in a Users.hml" ---- -kind: ModelPermissions -version: v1 -definition: - modelName: Users - permissions: - - role: admin - select: - filter: null - #highlight-start - - role: user - select: - filter: - fieldComparison: - field: id - operator: _eq - value: - sessionVariable: x-hasura-user-id - #highlight-end -``` - -:::tip Where are models? - -Remember, models are under the `metadata` subdirectory under the subgraph directory. - -::: - -### Step 2. Create a typePermission - -The `admin` role also has access to all fields for each model. However, adding a new role and limiting what columns can -be returned from your data source is just as simple. For example, let's restrict what a `user` can see by omitting -several fields from the `typePermissions`: - -```yaml title="For example, in a Users.hml" ---- -kind: TypePermissions -version: v1 -definition: - typeName: Users - permissions: - - role: admin - output: - allowedFields: - - createdAt - - email - - favoriteArtist - - id - - isEmailVerified - - lastSeen - - name - - password - - updatedAt - #highlight-start - - role: user - output: - allowedFields: - - email - - favoriteArtist - - id - - name - - password - #highlight-end -``` - -### Step 3. Add sample JWT authentication - -In a production environment, you would be creating JWT tokens using your authentication provider or custom business -logic solution. In this guide, we're going to encode a test JWT token with the information we need using the -[jwt.io](https://jwt.io/) site and configure our `AuthConfig` object to use this token. - -#### Step 3.1. Generate a JWT secret key - -Generate a random string that we'll use as the JWT secret key: - -```bash title="In your teminal, run the following command" -openssl rand -hex 16 -``` - -Copy the value returned by the terminal. - -:::info Creating a random string - -If you don't want to use openssl, you can use any other random string. The only requirement is that it must be at least -32 characters. - -::: - -#### Step 3.2. 
Update your AuthConfig object
-
-Edit the `AuthConfig` object in your project to match the following, using the secret key we just generated.
-
-```yaml title="In globals/metadata/auth-config.hml:"
-kind: AuthConfig
-version: v2
-definition:
-  mode:
-    jwt:
-      claimsConfig:
-        namespace:
-          claimsFormat: Json
-          location: "/claims.jwt.hasura.io"
-      key:
-        fixed:
-          algorithm: HS256
-          key:
-            value: "<your-secret-key>"
-      tokenLocation:
-        type: BearerAuthorization
-```
-
-Note that in our `AuthConfig` object, we've set the token location to be in standard `Authorization: Bearer <token>`
-format. This means that when we test our permissions, we'll need to pass the JWT token value in a header in this
-format.
-
-Read more about the `AuthConfig` object format [here](/supergraph-modeling/auth-config.mdx).
-
-#### Step 3.3. Create a new supergraph build
-
-Create a supergraph build using this `AuthConfig`.
-
-```bash
-ddn supergraph build local
-```
-
-:::tip Start your engines!
-
-Want to test your supergraph? Don't forget to start your GraphQL engine and connectors using the following command.
-
-```bash title="Run:"
-ddn run docker-start
-```
-
-**If you haven't [included your connector(s)](/getting-started/build/01-init-supergraph.mdx) in your `compose.yaml`,
-don't forget to start them as well.**
-
-:::
-
-### Step 4. Generate a JWT token
-
-Head to the [jwt.io](https://jwt.io/) site and encode a new token with the `HS256` algorithm.
-
-Paste the secret key you generated in the previous step in the "verify signature" field at the bottom right.
-
-Paste the following claims object in the "Payload" field:
-
-```json
-{
-  "exp": 1739905122,
-  "iat": 1708369122,
-  "claims.jwt.hasura.io": {
-    "x-hasura-default-role": "user",
-    "x-hasura-allowed-roles": ["user"],
-    "x-hasura-user-id": "<id-of-a-user-in-your-data-source>"
-  }
-}
-```
-
-
-
-Copy the encoded token value generated by jwt.io on the left side beginning with `ey...`.
-
-:::info What are these claims?
-
-In the example above, we're setting the following values:
-
-- The issued (`iat`) time as `Feb. 19 2024, at 18:58:42` as a Unix epoch timestamp.
-- The expiration (`exp`) time as `Feb. 18, 2025 at 18:58:42`.
-- The default role as `user`.
-- The allowed roles as `user`.
-- The `x-hasura-user-id` value as the `id` of a user in your data source.
-
-The values in the `claims.jwt.hasura.io` object are our session variables. For more information about the claims Hasura
-expects, check out [this page](/auth/authentication/jwt/setup.mdx).
-
-:::
-
-### Step 5. Test your permissions with your custom JWT
-
-In the Hasura console, add the JWT generated by jwt.io as the value of a new header called `Authorization` on the
-`GraphiQL` tab. The value should be in the format `Bearer <token>`.
-
-Now, you can execute queries with your custom JWT. For example:
-
-```graphql
-query PermissionTest {
-  users {
-    id
-    email
-    favoriteArtist
-    name
-    password
-  }
-}
-```
-
-### Step 6. Check the results
-
-If you've followed the steps correctly, you should see only the data of the user you've tested with, securing your API
-against unauthorized access by anyone without a valid JWT token.
-
-
-
-### Step 7. Revert changes to authentication (optional)
-
-With the update to your `AuthConfig` made in the above steps, you will now need to provide an `Authorization` header
-with JWT tokens to make any queries to your API.
- -To make requests with an `admin` role you will need to pass a JWT token with the following claims: - -```json -{ - "exp": 1739905122, - "iat": 1708369122, - "claims.jwt.hasura.io": { - "x-hasura-default-role": "admin", - "x-hasura-allowed-roles": ["admin"] - } -} -``` - -Instead, you can choose to revert your `AuthConfig` to the following to make all API requests to be made with the -`admin` role without need for any authentication. - -```yaml title="In globals/metadata/auth-config.hml:" -kind: AuthConfig -version: v2 -definition: - mode: - noAuth: - role: admin - sessionVariables: {} -``` - -Create a supergraph build using this `AuthConfig`. - -```bash -ddn supergraph build local -``` - -:::tip Start your engines! - -Want to test your supergraph? Don't forget to start your GraphQL engine and connectors using the following command. - -```bash title="Run:" -ddn run docker-start -``` - -::: - -## What did this do? - -By adding `modelPermissions`, we've enabled access to the `Users` model for the `user` role. This means that _only_ the -`Users` model is available to a consumer with the `x-hasura-role` value of `user`. - -Additionally, `modelPermissions` are used to filter the rows that can be returned from this model. They don't simply -check if the returned data is allowed; instead, they filter the data before it can be returned. In our example, this is -achieved by validating the `x-hasura-user-id` value against the matching record with the same `id` in the data source. - -We limited the fields they can access to the ones listed above, thereby preventing someone with the `user` role from -accessing information not meant for them, such as when users were last seen. - -We've then encoded a test JWT token with the `user` role and `x-hasura-user-id` value of a user in our data source. -We've passed this token in the `Authorization: Bearer ` header to test our permissions, which we've set up to -allow only the `Users` model to be accessed by a user with the `user` role. Hasura verified this token against our -secret key, decoded the token, extracted the `x-hasura-*` session variables, and applied the permissions we've set up to -the request. - -In a production environment, you would be creating JWT tokens for your users using your authentication provider and -passing them in the `Authorization` header of your requests. diff --git a/docs/getting-started/build/06-add-business-logic.mdx b/docs/getting-started/build/06-add-business-logic.mdx deleted file mode 100644 index fbead725b..000000000 --- a/docs/getting-started/build/06-add-business-logic.mdx +++ /dev/null @@ -1,17 +0,0 @@ ---- -sidebar_position: 7 -sidebar_label: Add custom business logic -description: "Learn how to get started with Hasura DDN and your GraphQL API." -keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide ---- - -import { DatabaseContentLoader } from "@site/src/components/databaseDocs"; - -# Add Custom Business Logic - - diff --git a/docs/getting-started/build/07-create-a-relationship.mdx b/docs/getting-started/build/07-create-a-relationship.mdx deleted file mode 100644 index de1c7ca57..000000000 --- a/docs/getting-started/build/07-create-a-relationship.mdx +++ /dev/null @@ -1,167 +0,0 @@ ---- -sidebar_position: 8 -sidebar_label: Create a relationship -description: "Learn how to get started with Hasura DDN and your GraphQL API." 
-keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide ---- - -import Thumbnail from "@site/src/components/Thumbnail"; - -# Create a Relationship - -## What's about to happen? - -We're going to create a relationship between entities in our supergraph. By doing this we'll unlock the ability to make -API queries across linked information even if that information is on different data sources. - -Relationships are defined in metadata **from an [object type](/supergraph-modeling/types.mdx#objecttype-objecttype), to -a [model](/supergraph-modeling/models.mdx) or [command](/supergraph-modeling/commands.mdx)**. - -By defining a `Relationship`, all models or commands whose output type is the source `ObjectType` will have a connection -to the target `model` or `command`. - - - -## Steps - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- Existing models or commands in your supergraph which can be related to an `ObjectType`. For more information on - creating a model in your supergraph you can see the sections on - [adding data source entities to the API](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx) for - your data source. And for commands you can see the - [integrating business logic](/getting-started/build/06-add-business-logic.mdx) section. - -::: - -Let's say you have an e-commerce system which includes `Customers` and `Orders`. Of course, it would be handy to relate -these two entities so that for instance when we query orders, we can see which customer made that order. This is simple -with Hasura DDN. - -For convenience and organization sake we should add this object to our `Orders.hml` file so that these common objects -are located together. We can use the [IDE plugin](/getting-started/build/00-prerequisites.mdx#install-lsp) to help us in -authoring the `Relationship` object to enable this. - -If we type out the `---` object delineation marker below the last object in our `Orders.hml` file and then start typing -`Relationsh...` the IDE plugin will popup and assist us with scaffolding out some options. Select -`Relationship (to model)` from the list and press enter, the plugin will output an empty object like the one below: - -```yaml title="In any HML file, use the Hasura VS Code extension to create this scaffold" ---- -kind: Relationship -version: v1 -definition: - name: relationship_name - sourceType: - target: - model: - name: - relationshipType: - mapping: - - source: - fieldPath: - - fieldName: - target: - modelField: - - fieldName: -``` - -:::tip Intellisense autocomplete - -The Hasura VS Code extension is powerful. At any point in authoring your object you can press `ctrl + space` to trigger -Intellisense and see the available options you have. - -::: - -Now we can fill out these fields. To enable an Orders to Customers relationship: - -- We know that when we query this relationship on an order, we want it to be named: "customer". (Hit `tab` to go the next editable placeholder.) -- We know we want the source of the relationship to be an order. This must be the `Orders` `ObjectType`. -- We know we want the target to be a customer. This must be the `Customers` `Model`. -- We know that the relationship will be 1-to-1. As in, an order only has one related customer. 
So the `relationshipType` will be an Object not an Array. -- We know that on the source `Orders` `ObjectType` there is a `customerId` field that maps to the id field `customerId` on the - target `Customer` `Model` which we want to use to define the relationship. - -So, from this we can fill in this object like the following: - -```yaml title="For example, in Orders.hml" -kind: Relationship -version: v1 -definition: - name: customer - sourceType: Orders - target: - model: - name: Customers - subgraph: my_subgraph - relationshipType: Object - mapping: - - source: - fieldPath: - - fieldName: customerId - target: - modelField: - - fieldName: customerId - description: The customer details for an order -``` - -Create a new build to test this. - -```bash title="Run:" -ddn supergraph build local -``` - -:::tip Start your engines! - -Want to test your supergraph? Don't forget to start your GraphQL engine and connectors using the following command. - -```bash title="Run:" -ddn run docker-start -``` - -**If you haven't [included your connector(s)](/getting-started/build/01-init-supergraph.mdx) in your `compose.yaml`, -don't forget to start it as well.** - -::: - -Then, open your development console. - -```bash title="Run:" -ddn console --local -``` - -## What did this do? - -From our example, we can now get customer related information when we query our orders. πŸŽ‰ - -```graphql title="Assuming the structure above, we can run the following nested query:" -query { - orders { - orderId - orderDate - customer { - customerId - name - email - } - } -} -``` - -Also, from enabling this on the `Orders` `ObjectType` , any other model or command which returns this `ObjectType` now -has the ability to return related customers. - -There are many other configurations of Relationships which you can enable. See the -[supergraph modeling](/supergraph-modeling/relationships.mdx) section for more info. - -## Next Steps - -Check out information on how to [mutate your data](/getting-started/build/08-mutate-data.mdx) with Hasura DDN. diff --git a/docs/getting-started/build/08-mutate-data.mdx b/docs/getting-started/build/08-mutate-data.mdx deleted file mode 100644 index bba3cc032..000000000 --- a/docs/getting-started/build/08-mutate-data.mdx +++ /dev/null @@ -1,23 +0,0 @@ ---- -sidebar_position: 9 -sidebar_label: Mutate data -description: "Learn how to get started with Hasura DDN and your GraphQL API." -keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide -hide_table_of_contents: true ---- - -import { DatabaseContentLoader } from "@site/src/components/databaseDocs"; - -# Mutate Data - - - -## Next steps - -If you've followed along to this point, you've seen what Hasura can do and are ready to deploy your supergraph. Head to -the [deployment section](/getting-started/deployment/index.mdx) to learn how to deploy connectors and your supergraph. 
diff --git a/docs/getting-started/build/_category_.json b/docs/getting-started/build/_category_.json deleted file mode 100644 index 4d1d210e4..000000000 --- a/docs/getting-started/build/_category_.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "label": "Build a Supergraph", - "position": 4 -} diff --git a/docs/getting-started/build/_databaseDocs/_clickHouse/_08-mutate-data.mdx b/docs/getting-started/build/_databaseDocs/_clickHouse/_08-mutate-data.mdx deleted file mode 100644 index 77273fa36..000000000 --- a/docs/getting-started/build/_databaseDocs/_clickHouse/_08-mutate-data.mdx +++ /dev/null @@ -1,180 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? - -You can easily write data to your database using the native driver of your data source and the TypeScript connector. You -can follow the steps below to create a function which manipulates or inserts data and is exposed as a mutation via your -GraphQL API. - - - -## Steps - -### Step 1. Install node packages - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A [ClickHouse connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) initialized -- A [DataConnectorLink](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx) configured -- A [TypeScript connector](/getting-started/build/06-add-business-logic.mdx) configured - -::: - -The TypeScript connector is a regular TypeScript project. We can use any Node.js packages we want. - -We'll use the [`@clickhouse/client` package](https://clickhouse.com/docs/en/integrations/language-clients/javascript) to -access our ClickHouse credentials and connection string, saved as environment variables, and to connect to our database. - -```bash title="From the my_subgraph/connector/my_ts directory, run:" -npm i dotenv @clickhouse/client -``` - -### Step 2. Add the connection URI - -If you've added a ClickHouse connector, you'll have a set of environment variables available in your project's `.env` -file. However, we'll need to make them available to our TypeScript connector by doing the following: - -#### Step 2.1 Update the TypeScript connector's `compose.yaml` - -```yaml title="Add the reference to the connection URI used in your project's root .env:" -environment: - HASURA_SERVICE_TOKEN_SECRET: $APP_MY_TS_HASURA_SERVICE_TOKEN_SECRET - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: $APP_MY_TS_OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - OTEL_SERVICE_NAME: $APP_MY_TS_OTEL_SERVICE_NAME - #highlight-start - CLICKHOUSE_URL: $APP_MY_CLICKHOUSE_URL - CLICKHOUSE_USERNAME: $APP_MY_CLICKHOUSE_USERNAME - CLICKHOUSE_PASSWORD: $APP_MY_CLICKHOUSE_PASSWORD - #highlight-end -``` - -#### Step 2.2 Update the TypeScript connector's `connector.yaml` - -```bash title="Add the environment mapping:" -envMapping: - HASURA_CONNECTOR_PORT: - fromEnv: APP_MY_TS_HASURA_CONNECTOR_PORT - HASURA_SERVICE_TOKEN_SECRET: - fromEnv: APP_MY_TS_HASURA_SERVICE_TOKEN_SECRET - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: - fromEnv: APP_MY_TS_OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - OTEL_SERVICE_NAME: - fromEnv: APP_MY_TS_OTEL_SERVICE_NAME - #highlight-start - CLICKHOUSE_URL: - fromEnv: APP_MY_CLICKHOUSE_URL - CLICKHOUSE_USERNAME: - fromEnv: APP_MY_CLICKHOUSE_USERNAME - CLICKHOUSE_PASSWORD: - fromEnv: APP_MY_CLICKHOUSE_PASSWORD - #highlight-end -``` - -### Step 3. 
Create a function - -The TypeScript connector uses [JSDoc comments](https://jsdoc.app/) to determine how to expose a function: either as a -query or a mutation. If we include the `@readOnly` tag, it will be exposed as a query. Simply omitting this tag will -expose the function as a mutation. - -We can even add documentation that will be visible in the GraphiQL explorer by tagging `@param` for the argument(s) and -`@returns` for what will be returned by the mutation. - -```ts title="my_subgraph/connectors/my_ts/functions.ts" -import { ClickHouseClient, createClient } from "@clickhouse/client"; - -/** - * @param userData An object containing the user's data. - * @returns The ID of the newly inserted user. - */ -export async function insertUser(userData: { name: string; email: string }): Promise { - const client: ClickHouseClient = createClient({ - url: process.env.CLICKHOUSE_URL, - username: process.env.CLICKHOUSE_USERNAME, - password: process.env.CLICKHOUSE_PASSWORD, - }); - - await client.insert({ - table: "users", - values: [{ name: userData.name, email: userData.email }], - format: "JSONEachRow", - }); - - const selectQuery = ` - SELECT id - FROM users - WHERE name = {name: String} AND email = {email: String} - ORDER BY id DESC - LIMIT 1 - `; - - const result = await client.query({ - query: selectQuery, - format: "JSON", - query_params: { - name: userData.name, - email: userData.email, - }, - }); - - const data = await result.json(); - - if (data.data.length > 0) { - return data.data[0].id; - } else { - throw new Error("Failed to insert user"); - } -} -``` - -### Step 4. Track the function - -Just as before, we'll track our function using the CLI by first updating our `DataConnectorLink`. - -```bash title="From any directory in the project, run:" -ddn connector-link update my_ts -``` - -And then bringing in the new command. - -```bash title="From any directory in the project, run:" -ddn command add my_ts insertUser -``` - -### Step 5. Create a new build and test - -Next, let's create a new build of our supergraph. - -```bash title="Run:" -ddn supergraph build local -``` - -:::tip Start your engines! - -Want to test your supergraph? Don't forget to start your GraphQL engine and connectors using the following command. - -```bash title="Run:" -ddn run docker-start -``` - -**If you haven't [included your connector(s)](/getting-started/build/01-init-supergraph.mdx) in your `compose.yaml`, -don't forget to start it as well.** - -::: - -Then, open your development console. - -```bash title="Run:" -ddn console --local -``` - -Finally, you should see your command available, along with its documentation, in the GraphiQL explorer as a mutation. - -## What did this do? - -This function created a mutation that we can use to insert data directly into our database. We utilized the `clickhouse` -package to leverage the native features of ClickHouse directly from our TypeScript function. While the example above is -used to insert data, you can use the same principles to modify or delete existing data in your database via your GraphQL -API. diff --git a/docs/getting-started/build/_databaseDocs/_go/_06-add-business-logic.mdx b/docs/getting-started/build/_databaseDocs/_go/_06-add-business-logic.mdx deleted file mode 100644 index 8e92897d6..000000000 --- a/docs/getting-started/build/_databaseDocs/_go/_06-add-business-logic.mdx +++ /dev/null @@ -1,242 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? 
- -With Hasura, you can integrate β€” and even host β€” business logic directly with Hasura DDN and your API. - -You can handle custom business logic using the Go connector. Using this connector, you can transform or -enrich data before it reaches your consumers, or perform any other app business logic you may need. - -You can then integrate these functions as individual [**commands**](/supergraph-modeling/commands.mdx) in your metadata -and resulting API. This process simplifies client applications and speeds up your backend development. - - - -## Steps - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- At least one [subgraph](/getting-started/build/02-init-subgraph.mdx) -- [Go](https://go.dev/) version `>=1.21.0` - -::: - -In this guide we will: - -- Initialize the `hasura/go` data connector -- Generate the metadata to track the function script as a command in our metadata -- Create a new API build and test it - -### Step 1. Initialize the Go connector - -Let's begin by initializing the connector on our project. In the example below, you'll see a familiar flow and use the -`hasura/go` connector from the connector hub. - -```bash title="Run the following command:" -ddn connector init my_go -i -``` - -- Select `hasura/go` from the list of connectors. -- Choose a port (press enter to accept the default recommended by the CLI). -- In this example, we've called the connector `my_go`. You can name it something descriptive. - -:::tip Best practices - -Importantly, a data connector can only connect to one data source. - -The project will be kept organized with each data connector's configuration located in a relevant subgraph directory. In -this example the CLI will create a `my_subgraph/connector/my_go` directory if it doesn't exist. You can also change this -directory by passing a `--dir` flag to the CLI. - -We recommend that the name of the connector and the directory in which the configuration is stored, `my_go` in this -example, should match for convenience and clarity sake. - -::: - -#### What did this do? - -This command created the following file structure in a `my_subgraph/connector/my_go` directory, with Go files in the -`functions` folder being your connector's entrypoint: - -```bash -. -β”œβ”€β”€ .ddnignore -β”œβ”€β”€ .gitignore -β”œβ”€β”€ .hasura-connector -β”‚ β”œβ”€β”€ ... -β”œβ”€β”€ compose.yaml -# highlight-start -β”œβ”€β”€ functions -β”‚ β”œβ”€β”€ hello.go -β”‚ β”œβ”€β”€ types.generated.go -# highlight-end -β”œβ”€β”€ types -β”‚ β”œβ”€β”€ connector.go -β”œβ”€β”€ connector.generated.go -β”œβ”€β”€ connector.go -β”œβ”€β”€ main.go -β”œβ”€β”€ go.mod -β”œβ”€β”€ go.sum -β”œβ”€β”€ Makefile -β”œβ”€β”€ README.md -β”œβ”€β”€ schema.generated.json -``` - -### Step 2. Write business logic - -The template code that ships with the Go connector provides some simple examples in the `functions/hello.go` file to -help explain how it works. - -Functions that have a `Function` prefix are allowed to be exposed as a read-only queries. 
For example: - -```go -// A hello argument -type HelloArguments struct { - Greeting string `json:"greeting"` // value argument will be required - Count *int `json:"count"` // pointer arguments are optional -} - -// A hello result -type HelloResult struct { - Reply string `json:"reply"` - Count int `json:"count"` -} - -// FunctionHello sends a hello message -func FunctionHello(ctx context.Context, state *types.State, arguments *HelloArguments) (*HelloResult, error) { - count := 1 - if arguments.Count != nil { - count = *arguments.Count + 1 - } - return &HelloResult{ - Reply: fmt.Sprintf("Hi! %s", arguments.Greeting), - Count: count, - }, nil -} -``` - -Functions that have a `Procedure` prefix are allowed to be exposed as a mutation. For example: - -```go -// A create author argument -type CreateAuthorArguments struct { - Name string `json:"name"` -} - -// A create author result -type CreateAuthorResult struct { - ID int `json:"id"` - Name string `json:"name"` -} - -// ProcedureCreateAuthor creates an author -func ProcedureCreateAuthor(ctx context.Context, state *types.State, arguments *CreateAuthorArguments) (*CreateAuthorResult, error) { - return &CreateAuthorResult{ - ID: 1, - Name: arguments.Name, - }, nil -} -``` - -The CLI plugin infers the third argument of the function and generates the input argument. The first result type is -generated as the response schema. - -See more details in the documentation of -[hasura-ndc-go](https://github.com/hasura/ndc-sdk-go/tree/main/cmd/hasura-ndc-go) plugin. - -### Step 3. Track the new function - -To add our function, we can use the following to generate the related metadata that will link together any functions in -the `functions` folder and our API. - -```bash -ddn connector introspect my_go -``` - -Then, we can generate an `hml` file for the function using the following command. - -```bash title="Track the function:" -ddn command add my_go hello -``` - -:::info Have a lot of functions? - -If you have a lot of functions and want to add them all en masse, you can use the CLI to generate metadata for each. - -```bash title="Run the following:" -ddn command add my_go "*" -``` - -::: - -#### What did this do? - -The command introspected your `functions` directory and found any functions present along with their argument and return -types. - -The CLI then used this to create Hasura metadata for each function which can then be exposed in your API. - -### Step 4. Create a new API build and test - -Next, let's create a new build of our supergraph: - -```bash -ddn supergraph build local -``` - -:::tip Start your engines! - -Want to test your supergraph? Don't forget to start your GraphQL engine using the following command: - -```bash title="Run:" -ddn run docker-start -``` - -This reads the `docker-start` script from the context config at `.hasura/context.yaml` and starts your Hasura engine, -any connectors, and observability tools. - -::: - -You should see your command available, along with its documentation, in the GraphiQL explorer which you should be able -to access using the CLI. - -```bash title="Run:" -ddn console --local -``` - -```graphql title=" You can then test your new command with the following query:" -query Hello { - hello(greeting: "world") { - reply - count - } -} -``` - -:::tip Privacy settings in some browsers - -Your browser settings or privacy tools may prevent the Console from accessing your local Hasura instance. This could be -due to features designed to protect your privacy and security. 
Should you encounter one of these issues, we recommend -disabling these settings for the `console.hasura.io` domain. - -[Chrome](https://www.google.com/chrome/) and [Firefox](https://www.mozilla.org/en-US/firefox/new/) are the recommended -browsers for the best experience with the Hasura Console, including for local development. - -::: - -## What did this do? - -The commands above initialized a new Go connector, installed dependencies, and created a new function and procedure to -demonstrate how the connector works. We then added this function to our metadata as a command, and created a new build -of our supergraph. - -## Next Steps - -You can also [create relationships](/getting-started/build/07-create-a-relationship.mdx) between types in your -supergraph and your commands. This enables you to pair custom business logic with β€” for example β€” database tables, and -then transform or enrich data before sending it back to your consumers. - -You can learn more about creating these and other relationships on the -[next page](/getting-started/build/07-create-a-relationship.mdx). diff --git a/docs/getting-started/build/_databaseDocs/_graphql/_08-mutate-data.mdx b/docs/getting-started/build/_databaseDocs/_graphql/_08-mutate-data.mdx deleted file mode 100644 index 7382de2a2..000000000 --- a/docs/getting-started/build/_databaseDocs/_graphql/_08-mutate-data.mdx +++ /dev/null @@ -1,148 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? - -You can easily mutate data using your existing GraphQL mutations; any mutations present when you introspected your schema -will be available via your supergraph's API. - - - -## Write your mutation - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A [GraphQL connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) initialized -- A [DataConnectorLink](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx) configured - -::: - -Simply head to the GraphiQL explorer in the console and write a valid mutation like the example below: - -```graphql -mutation InsertTestingMutation { - insertTestingOne(object: { name: "Hasura" }) { - id - name - } -} -``` - -This mutation is possible because, for each mutation present in your external source's schema, Hasura generates a -[Command](/supergraph-modeling/commands.mdx). - -
- You can see an example of a Command's metadata here. - -```yaml ---- -kind: ObjectType -version: v1 -definition: - name: TestingInsertInput - description: input type for inserting data into table "testing" - fields: - - name: id - type: Int - - name: name - type: String - graphql: - typeName: TestingInsertInput - inputTypeName: TestingInsertInputInput - dataConnectorTypeMapping: - - dataConnectorName: my_graphql - dataConnectorObjectType: testing_insert_input - ---- -kind: TypePermissions -version: v1 -definition: - typeName: TestingInsertInput - permissions: - - role: admin - output: - allowedFields: - - id - - name - ---- -kind: ObjectType -version: v1 -definition: - name: TestingOnConflict - description: on_conflict condition type for table "testing" - fields: - - name: constraint - type: TestingConstraint! - - name: updateColumns - type: "[TestingUpdateColumn!]!" - - name: where - type: TestingBoolExp - graphql: - typeName: TestingOnConflict - inputTypeName: TestingOnConflictInput - dataConnectorTypeMapping: - - dataConnectorName: my_graphql - dataConnectorObjectType: testing_on_conflict - fieldMapping: - constraint: - column: - name: constraint - updateColumns: - column: - name: update_columns - where: - column: - name: where - ---- -kind: TypePermissions -version: v1 -definition: - typeName: TestingOnConflict - permissions: - - role: admin - output: - allowedFields: - - constraint - - updateColumns - - where - ---- -kind: Command -version: v1 -definition: - name: InsertTesting - outputType: TestingMutationResponse - arguments: - - name: objects - type: "[TestingInsertInput!]!" - description: the rows to be inserted - - name: onConflict - type: TestingOnConflict - description: upsert condition - source: - dataConnectorName: my_graphql - dataConnectorCommand: - procedure: insert_testing - argumentMapping: - objects: objects - onConflict: on_conflict - graphql: - rootFieldName: insertTesting - rootFieldKind: Mutation - description: 'insert data into the table: "testing"' - ---- -kind: CommandPermissions -version: v1 -definition: - commandName: InsertTesting - permissions: - - role: admin - allowExecution: true -``` - -
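For illustration only: assuming the generated `TestingMutationResponse` type exposes the usual `affectedRows` and `returning` fields (your actual schema may name these differently), the `InsertTesting` command above could be exercised with a batch mutation like this:

```graphql
# A sketch, not from the original docs: the objects argument and the
# insertTesting root field come from the Command metadata above, but the
# response fields (affectedRows, returning) are assumed and may differ
# in your schema.
mutation InsertTestingBatch {
  insertTesting(objects: [{ name: "Hasura" }, { name: "DDN" }]) {
    affectedRows
    returning {
      id
      name
    }
  }
}
```

Note how the `objects` argument lines up with the `argumentMapping` entry, and `insertTesting` with `graphql.rootFieldName`, in the Command definition above.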
diff --git a/docs/getting-started/build/_databaseDocs/_mongoDB/_08-mutate-data.mdx b/docs/getting-started/build/_databaseDocs/_mongoDB/_08-mutate-data.mdx deleted file mode 100644 index 8eb9345b5..000000000 --- a/docs/getting-started/build/_databaseDocs/_mongoDB/_08-mutate-data.mdx +++ /dev/null @@ -1,161 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? - -You can easily write data to your database using the native driver of your data source and the TypeScript connector. You -can follow the steps below to create a function that manipulates or inserts data and is exposed as a mutation via your -GraphQL API. - - - -## Steps - -### Step 1. Install Node.js packages - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A [MongoDB connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) initialized -- A [DataConnectorLink](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx) configured -- A [TypeScript connector](/getting-started/build/06-add-business-logic.mdx) configured - -::: - -The TypeScript connector is a regular TypeScript project. We can use any Node.js packages we want. - -We'll use the `mongodb` package to access our connection string, saved as an environment variable, and to connect to our -database. - -```bash title="From the my_subgraph/connector/my_mongo directory, run:" -npm i mongodb -``` - -### Step 2. Add the connection URI - -If you've added a MongoDB connector, you'll have an environment variable available in your project's `.env` file. -However, we'll need to make it available to our TypeScript connector by doing the following: - -#### Step 2.1 Update the TypeScript connector's `compose.yaml` - -```yaml title="Add the reference to the connection URI used in your project's root .env:" -environment: - HASURA_SERVICE_TOKEN_SECRET: $APP_MY_TS_HASURA_SERVICE_TOKEN_SECRET - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: $APP_MY_TS_OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - OTEL_SERVICE_NAME: $APP_MY_TS_OTEL_SERVICE_NAME - #highlight-start - MY_MONGO_MONGODB_DATABASE_URI: $APP_MY_MONGO_MONGODB_DATABASE_URI - #highlight-end -``` - -#### Step 2.2 Update the TypeScript connector's `connector.yaml` - -```yaml title="Add the environment mapping:" -envMapping: - HASURA_CONNECTOR_PORT: - fromEnv: APP_MY_TS_HASURA_CONNECTOR_PORT - HASURA_SERVICE_TOKEN_SECRET: - fromEnv: APP_MY_TS_HASURA_SERVICE_TOKEN_SECRET - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: - fromEnv: APP_MY_TS_OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - OTEL_SERVICE_NAME: - fromEnv: APP_MY_TS_OTEL_SERVICE_NAME - #highlight-start - MY_MONGO_MONGODB_DATABASE_URI: - fromEnv: APP_MY_MONGO_MONGODB_DATABASE_URI - #highlight-end -``` - -### Step 3. Create a function - -The TypeScript connector uses [JSDoc comments](https://jsdoc.app/) to determine how to expose a function: either as a -query or a mutation. If we include the `@readOnly` tag, it will be exposed as a query. Simply omitting this tag will -expose the function as a mutation. - -We can even add documentation that will be visible in the GraphiQL explorer by tagging `@param` for the argument(s) and -`@returns` for what will be returned by the mutation.
- -```ts title="my_subgraph/connectors/my_ts/functions.ts" -import { MongoClient } from "mongodb"; - -/** - * @param productData An object containing the product's data. - * @returns The ID of the newly inserted product. - */ -export async function insertProduct(productData: { title: string; description: string }): Promise { - const client = new MongoClient(process.env.MY_MONGO_MONGODB_DATABASE_URI!, { - useNewUrlParser: true, - useUnifiedTopology: true, - }); - - try { - await client.connect(); - const database = client.db("yourDatabaseName"); // Replace with your database name - const collection = database.collection("product"); - - const result = await collection.insertOne({ - title: productData.title, - description: productData.description, - }); - - if (result.insertedId) { - return result.insertedId.toString(); - } else { - throw new Error("Failed to insert product"); - } - } finally { - await client.close(); - } -} -``` - -### Step 4. Track the function - -Just as before, we'll track our function using the CLI by first updating our `DataConnectorLink`. - -```bash title="From any directory in the project, run:" -ddn connector-link update my_ts -``` - -And then bringing in the new command. - -```bash title="From any directory in the project, run:" -ddn command add my_ts insertProduct -``` - -### Step 5. Create a new build and test - -Next, let's create a new build of our supergraph. - -```bash title="Run:" -ddn supergraph build local -``` - -:::tip Start your engines! - -Want to test your supergraph? Don't forget to start your GraphQL engine and connectors using the following command. - -```bash title="Run:" -ddn run docker-start -``` - -**If you haven't [included your connector(s)](/getting-started/build/01-init-supergraph.mdx) in your `compose.yaml`, -don't forget to start it as well.** - -::: - -Then, open your development console. - -```bash title="Run:" -ddn console --local -``` - -Finally, you should see your command available, along with its documentation, in the GraphiQL explorer as a mutation. - -## What did this do? - -This function created a mutation that we can use to insert data directly into our database. We utilized the `mongodb` -package to leverage the native features of MongoDB directly from our TypeScript function. While the example above is -used to insert data, you can use the same principles to modify or delete existing data in your database via your GraphQL -API. diff --git a/docs/getting-started/build/_databaseDocs/_openAPI/_08-mutate-data.mdx b/docs/getting-started/build/_databaseDocs/_openAPI/_08-mutate-data.mdx deleted file mode 100644 index 1e7b8c15c..000000000 --- a/docs/getting-started/build/_databaseDocs/_openAPI/_08-mutate-data.mdx +++ /dev/null @@ -1,150 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? - -We can add business logic to our existing or new functions to modify data before it's sent to the API, or before it's -returned to the client. - - - -## Steps - -### Step 1. 
Create or modify an existing function - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- An [OpenAPI connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) initialized -- A [DataConnectorLink](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx) configured - -::: - -For this example, we'll add a step to an **existing function** to capitalize the name of a pet before it's added to the -store. - -```ts title="my_subgraph/connector/my_openapi/functions.ts" -/** - * Add a new pet to the store - * @request POST :/pet - * @allowrelaxedtypes - * @save - */ -export async function postPetAddPet( - /** Request body */ - data: Pet, - headers?: hasuraSdk.JSONValue, -): Promise<Pet> { - // highlight-start - // Capitalize pet's name - data.name = data.name?.toUpperCase(); - // highlight-end - - const result = await api.pet.addPet({ - data: data, - params: { - headers: (headers?.value as Record<string, string>) ?? undefined, - }, - }); - if (result.data) { - return result.data; - } else { - throw result.error; - } -} -``` - -We can also, for example, add a completely **new function** to get a pet by its ID and return a capitalized name: - -```ts title="my_subgraph/connector/my_openapi/functions.ts" -/** - * Find capitalized pet by ID - * @request GET :/pet/{petId} - * @allowrelaxedtypes - * @readonly - */ -export async function getPetGetCapPetById( - /** - * ID of pet to return capitalized name - */ - petId: number, - headers?: hasuraSdk.JSONValue, -): Promise<Pet> { - const result = await api.pet.getPetById({ - petId: petId, - params: { - headers: (headers?.value as Record<string, string>) ?? undefined, - }, - }); - if (result.data) { - // highlight-start - result.data.name = result.data.name?.toUpperCase(); - // highlight-end - return result.data; - } else { - throw result.error; - } -} -``` - -If we use the `@save` tag in the JSDoc comments for the function, changes we make to the function will not be -overwritten if we re-introspect the connector. - -:::info JSDoc comments - -The OpenAPI connector uses [JSDoc comments](https://jsdoc.app/) to determine how to expose a function: either as a query -or a mutation. If we include the `@readOnly` tag, it will be exposed as a query. Simply omitting this tag will expose -the function as a mutation. - -We can even add documentation that will be visible in the GraphiQL explorer by tagging `@param` for the argument(s) and -`@returns` for what will be returned by the mutation. - -`@allowrelaxedtypes` converts unsupported types into scalars. This is because certain TypeScript types, such as `any`, -don't map well to GraphQL. So, `@allowrelaxedtypes` tells the connector to convert these into an opaque scalar type -instead of rejecting the function that uses them. - -::: - -### Step 2. Track the function - -We'll track our function using the CLI by first updating our `DataConnectorLink` for this data connector: - -```bash -ddn connector-link update <connector_link_name> -``` - -And then bringing in the command: - -```bash -ddn command add <connector_link_name> getPetGetCapPetById -``` - -### Step 3. Create a new build and test - -Next, let's create a new build of our supergraph: - -```bash -ddn supergraph build local -``` - -:::tip Start your engines! - -Want to test your supergraph? Don't forget to start your GraphQL engine and connectors using the following command.
- -```bash title="Run the following command, updating the referenced paths to match your directory structure:" -ddn run docker-start -``` - -**If you haven't [included your connector(s)](/getting-started/build/01-init-supergraph.mdx) in your -`docker-compose.hasura.yaml`, don't forget to start it as well.** - -::: - -Then, open your development console. - -```bash title="Run:" -ddn console --local -``` - -Finally, you should see your command available, along with its documentation, in the GraphiQL explorer as a mutation. diff --git a/docs/getting-started/build/_databaseDocs/_postgreSQL/_08-mutate-data.mdx b/docs/getting-started/build/_databaseDocs/_postgreSQL/_08-mutate-data.mdx deleted file mode 100644 index 2a9376984..000000000 --- a/docs/getting-started/build/_databaseDocs/_postgreSQL/_08-mutate-data.mdx +++ /dev/null @@ -1,316 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -We provide two main ways to mutate PostgreSQL data: via auto-generated PostgreSQL mutations, and via the TypeScript connector. -The auto-generated mutations are more convenient, while the TypeScript connector is more flexible. Let's explore what -each option has to offer. - -## Via auto-generated mutations - -### What's about to happen? - -The PostgreSQL connector provides auto-generated [point mutations](https://hasura.io/docs/3.0/connectors/postgresql/#point-mutations) - -these are insert, delete and update mutations affecting a single row using uniqueness constraints. -You can follow these steps to enable this feature, generate GraphQL mutations models, and define permissions for them. - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A [PostgreSQL connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) initialized - -::: - -### Steps - -#### Step 1. Enable auto-generated mutations - -These auto-generated mutations feature is disabled by default by the PostgreSQL connector. In order to enable it, -open the ndc-postgres `configuration.json` found in the connector's directory, and edit the field `mutationsVersion` -to `"v2"`. This will tell the connector to generate mutations based on the configuration, for which we can create -models. - -This can be done with the following sed script: - -```bash title="Run the following command, updating the referenced paths to match your directory structure:" -sed -i 's/"mutationsVersion": null/"mutationsVersion": "v2"/' app/connector/my_pg/configuration.json -``` - -#### Step 2. Introspect the connector and add resources - -Next, we will introspect the connector and import all of the newly available mutation commands. - -```bash title="Run:" -ddn connector introspect my_pg --add-all-resources -``` - -This will create `.hml` files for each mutation model, such as: - -``` -. -β”œβ”€β”€ app -β”‚Β Β  β”œβ”€β”€ ... -β”‚Β Β  β”œβ”€β”€ metadata -β”‚Β Β  β”‚Β Β  β”œβ”€β”€ ... -β”‚Β Β  β”‚Β Β  β”œβ”€β”€ V2DeleteUsersByEmail.hml -β”‚Β Β  β”‚Β Β  β”œβ”€β”€ V2InsertUsers.hml -β”‚Β Β  β”‚Β Β  β”œβ”€β”€ V2UpdateUsersById.hml -``` - -#### Step 3. Set command permissions via argument presets - -Using [argument presets](/supergraph-modeling/permissions.mdx#boolean-expressions) we can specify a boolean expression -as a preset argument in place of the permission checks for each command. 
 - -Each command requires entries for the following arguments: - -- Delete: - - `preCheck` - check a condition on a row before deleting it. -- Insert: - - `postCheck` - check a condition on the inserted row. -- Update: - - `preCheck` - check a condition on a row before updating it. - - `postCheck` - check a condition on the updated row. - -We can specify these argument presets in the relevant `.hml` file for the required model. - -For example, in the `V2DeleteUsersById` model, we can specify that the only row we allow to delete is one where `id` -is equal to `b1358c05-a457-41e7-b77e-56efce2cdd06` by adding the following `argumentPresets` structure to the -`CommandPermissions` in the `app/metadata/V2DeleteUsersById.hml` file. - -```hml -kind: CommandPermissions -version: v1 -definition: - commandName: V2DeleteUsersById - permissions: - - role: admin - allowExecution: true - argumentPresets: - - argument: preCheck - value: - booleanExpression: - fieldComparison: - field: id - operator: _eq - value: - literal: b1358c05-a457-41e7-b77e-56efce2cdd06 -``` - -If we don't wish to provide any checks at this time, we can provide the following: - -```hml -value: - booleanExpression: - and: [] -``` - -For example, used here as both `pre-` and `post-` checks on an update mutation: - -```hml -kind: CommandPermissions -version: v1 -definition: - commandName: V2UpdateUsersById - permissions: - - role: admin - allowExecution: true - argumentPresets: - - argument: preCheck - value: - booleanExpression: - and: [] - - argument: postCheck - value: - booleanExpression: - and: [] -``` - -#### Step 4. Build and run - -After editing the `.hml` files of your models, create a new build of your supergraph: - -```bash title="Run:" -ddn supergraph build local -``` - -:::tip Start your engines! - -Want to test your supergraph? Don't forget to start your GraphQL engine and connectors using the following command. - -```bash title="Run:" -ddn run docker-start -``` - -**If you haven't [included your connector(s)](/getting-started/build/01-init-supergraph.mdx) in your `compose.yaml`, -don't forget to start them as well.** - -::: - -Then, open your development console. - -```bash title="Run:" -ddn console --local -``` - -Finally, you should see your command available, along with its documentation, in the GraphiQL explorer as a mutation. - -## Via the TypeScript connector - -### What's about to happen? - -You can easily write data to your database using the native driver of your data source and the TypeScript connector. You -can follow the steps below to create a function that manipulates or inserts data and is exposed as a mutation via your -GraphQL API. - - - -### Steps - -#### Step 1. Install node packages - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [supergraph](/getting-started/build/01-init-supergraph.mdx) -- A new or existing [subgraph](/getting-started/build/02-init-subgraph.mdx) -- A [PostgreSQL connector](/getting-started/build/03-connect-to-data/01-connect-a-source.mdx) initialized -- A [DataConnectorLink](/getting-started/build/03-connect-to-data/02-create-source-metadata.mdx) configured -- A [TypeScript connector](/getting-started/build/06-add-business-logic.mdx) configured - -::: - -The TypeScript connector is a regular TypeScript project. We can use any Node.js packages we want. - -We'll use the `pg` package to access our connection string, saved as an environment variable, and to connect to our -database.
- -```bash title="From the my_subgraph/connector/my_ts directory, run:" -npm i pg -npm i --save-dev @types/pg -``` - -#### Step 2. Add the connection URI - -If you've added a PostgreSQL connector, you'll have an environment variable available in your project's `.env` file. -However, we'll need to make it available to our TypeScript connector by doing the following: - -#### Step 2.1 Update the TypeScript connector's `compose.yaml` - -```yaml title="Add the reference to the connection URI used in your project's root .env:" -environment: - HASURA_SERVICE_TOKEN_SECRET: $APP_MY_TS_HASURA_SERVICE_TOKEN_SECRET - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: $APP_MY_TS_OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - OTEL_SERVICE_NAME: $APP_MY_TS_OTEL_SERVICE_NAME - #highlight-start - PG_CONNECTION_URI: $APP_MY_PG_CONNECTION_URI - #highlight-end -``` - -#### Step 2.2 Update the TypeScript connector's `connector.yaml` - -```bash title="Add the environment mapping:" -envMapping: - HASURA_CONNECTOR_PORT: - fromEnv: APP_MY_TS_HASURA_CONNECTOR_PORT - HASURA_SERVICE_TOKEN_SECRET: - fromEnv: APP_MY_TS_HASURA_SERVICE_TOKEN_SECRET - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: - fromEnv: APP_MY_TS_OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - OTEL_SERVICE_NAME: - fromEnv: APP_MY_TS_OTEL_SERVICE_NAME - #highlight-start - PG_CONNECTION_URI: - fromEnv: APP_MY_PG_CONNECTION_URI - #highlight-end -``` - -#### Step 3. Create a function - -The TypeScript connector uses [JSDoc comments](https://jsdoc.app/) to determine how to expose a function: either as a -query or a mutation. If we include the `@readOnly` tag, it will be exposed as a query. Simply omitting this tag will -expose the function as a mutation. - -We can even add documentation that will be visible in the GraphiQL explorer by tagging `@param` for the argument(s) and -`@returns` for what will be returned by the mutation. - -```ts title="my_subgraph/connectors/my_ts/functions.ts" -import { Client } from "pg"; - -/** - * @param userData An object containing the user's data. - * @returns The ID of the newly inserted user. - */ -export async function insertUser(userData: { name: string; email: string }): Promise { - const client = new Client({ - connectionString: process.env.PG_CONNECTION_URI, - }); - - await client.connect(); - - const queryText = ` - INSERT INTO users (name, email) - VALUES ($1, $2) - RETURNING id - `; - const values = [userData.name, userData.email]; - const result = await client.query(queryText, values); - - await client.end(); - - if (result.rows.length > 0) { - return result.rows[0].id; - } else { - throw new Error("Failed to insert user"); - } -} -``` - -#### Step 4. Track the function - -Just as before, we'll track our function using the CLI by first updating our `DataConnectorLink`. - -```bash title="From any directory in the project, run:" -ddn connector-link update my_ts -``` - -And then bring in the new command. - -```bash title="From any directory in the project, run:" -ddn command add my_ts insertUser -``` - -#### Step 5. Create a new build and test - -Next, let's create a new build of our supergraph. - -```bash title="Run:" -ddn supergraph build local -``` - -:::tip Start your engines! - -Want to test your supergraph? Don't forget to start your GraphQL engine and connectors using the following command. - -```bash title="Run:" -ddn run docker-start -``` - -**If you haven't [included your connector(s)](/getting-started/build/01-init-supergraph.mdx) in your `compose.yaml`, -don't forget to start it as well.** - -::: - -Then, open your development console. 
- -```bash title="Run:" -ddn console --local -``` - -Finally, you should see your command available, along with its documentation, in the GraphiQL explorer as a mutation. - -### What did this do? - -This function created a mutation that we can use to insert data directly into our database. We utilized raw SQL to -leverage the native features of PostgreSQL directly from our TypeScript function. While the example above is used to -insert data, you can use the same principles to modify or delete existing data in your database via your GraphQL API. diff --git a/docs/getting-started/build/_databaseDocs/_python/_06-add-business-logic.mdx b/docs/getting-started/build/_databaseDocs/_python/_06-add-business-logic.mdx deleted file mode 100644 index 379cb41a7..000000000 --- a/docs/getting-started/build/_databaseDocs/_python/_06-add-business-logic.mdx +++ /dev/null @@ -1,249 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? - -With Hasura, you can integrate β€” and even host β€” business logic directly with Hasura DDN and your API. - -You can handle custom business logic using the Python Lambda connector. Using this connector, you can transform or -enrich data before it reaches your consumers, or perform any other app business logic you may need. - -You can then integrate these functions as individual [**commands**](/supergraph-modeling/commands.mdx) in your metadata -and resulting API. This process simplifies client applications and speeds up your backend development. - - - -## Steps - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- At least one [subgraph](/getting-started/build/02-init-subgraph.mdx) -- [Python](https://www.python.org/downloads/) version `>=3.11` - -::: - -In this guide we will: - -- Initialize the `hasura/python` data connector -- Use Python and pip to create a simple script -- Generate the metadata to track the function script as a command in our metadata -- Create a new API build and test it - -### Step 1. Initialize the Python Lambda connector - -Let's begin by initializing the connector on our project. In the example below, you'll see a familiar flow and use the -`hasura/python` connector from the connector hub. - -```bash title="Run the following command:" -ddn connector init my_python -i -``` - -- Select `hasura/python` from the list of connectors. -- Choose a port (press enter to accept the default recommended by the CLI). -- In this example, we've called the connector `my_python`. You can name it something descriptive. - -:::tip Best practices - -Importantly, a data connector can only connect to one data source. - -The project will be kept organized with each data connector's configuration located in a relevant subgraph directory. In -this example the CLI will create a `my_subgraph/connector/my_python` directory if it doesn't exist. You can also change -this directory by passing a `--dir` flag to the CLI. - -We recommend that the name of the connector and the directory in which the configuration is stored, `my_python` in this -example, should match for convenience and clarity sake. - -::: - -#### What did this do? - -This command created the following file structure in a `my_subgraph/connector/my_python` directory, with the -`functions.py` file being your connector's entrypoint: - -```bash -. 
-β”œβ”€β”€ .ddnignore -β”œβ”€β”€ .gitignore -β”œβ”€β”€ .hasura-connector -β”‚ β”œβ”€β”€ ... -β”œβ”€β”€ compose.yaml -β”œβ”€β”€ connector.yaml -# highlight-start -β”œβ”€β”€ functions.py -# highlight-end -└── requirements.txt -``` - -### Step 2. Write business logic - -The template code that ships with the Python Lambda connector provides some simple examples to help explain how it -works. We can replace those example functions for now. - -In this simple example, we're going to transform a timestamp with timezone (e.g. "2024-03-14T08:00:00Z") into a nicely -formatted version for humans, e.g. "8am, Thursday, March 14th, 2024." - -We'll replace all the default functions in our `functions.py` file with the following: - -```python -from hasura_ndc import start -from hasura_ndc.function_connector import FunctionConnector -from datetime import datetime # Don't forget to import datetime at the top of the file! - -connector = FunctionConnector() - -@connector.register_query -async def format_timezone_date(date_string: str) -> str: - date = datetime.fromisoformat(date_string) - - day = date.day - nth = lambda d: "th" if 11 <= d <= 13 else {1: "st", 2: "nd", 3: "rd"}.get(d % 10, "th") - - hours = date.hour - ampm = "pm" if hours >= 12 else "am" - hour = hours % 12 or 12 - - day_of_week = date.strftime("%A") - month = date.strftime("%B") - year = date.year - - return f"{hour}{ampm}, {day_of_week}, {month} {day}{nth(day)}, {year}." - - -if __name__ == "__main__": - start(connector) -``` - -As this is a Python project, you can install any dependency! - -### Step 3. Track the new function - -To add our function, we can use the following to generate the related metadata that will link together any functions in -this `functions.py` file and our API. - -```bash -ddn connector introspect my_python -``` - -Then, we can generate an `hml` file for the function using the following command. - -```bash title="Track the function:" -ddn command add my_python format_timezone_date -``` - -:::info Have a lot of functions? - -If you have a lot of functions and want to add them all en masse, you can use the CLI to generate metadata for each. - -```bash title="Run the following:" -ddn command add my_python "*" -``` - -::: - -#### What did this do? - -The command introspected your `functions.py` file and found any functions present along with their argument and return -types. - -The CLI then used this to create Hasura metadata for each function, which can then be exposed in your API. - -### Step 4. Create a new API build and test - -Next, let's create a new build of our supergraph: - -```bash -ddn supergraph build local -``` - -:::tip Start your engines! - -Want to test your supergraph? Don't forget to start your GraphQL engine using the following command: - -```bash title="Run:" -ddn run docker-start -``` - -This reads the `docker-start` script from the context config at `.hasura/context.yaml` and starts your Hasura engine, -any connectors, and observability tools. - -::: - -You should see your command available, along with its documentation, in the GraphiQL explorer, which you can -access using the CLI. - -```bash title="Run:" -ddn console --local -``` - -```graphql title="You can then test your new command with the following query:" -query MyQuery { - formatTimezoneDate(dateString: "2024-03-14") -} -``` - -
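As a quick sanity check, and assuming the sample `format_timezone_date` function above is unchanged, passing the full timestamp from the introduction should return the formatted string it promises:

```graphql
# A sketch: assumes Python 3.11+, whose datetime.fromisoformat accepts the
# trailing "Z". The expected response would be:
# { "data": { "formatTimezoneDate": "8am, Thursday, March 14th, 2024." } }
query FormatFullTimestamp {
  formatTimezoneDate(dateString: "2024-03-14T08:00:00Z")
}
```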
- -:::tip Privacy settings in some browsers - -Your browser settings or privacy tools may prevent the Console from accessing your local Hasura instance. This could be -due to features designed to protect your privacy and security. Should you encounter one of these issues, we recommend -disabling these settings for the `console.hasura.io` domain. - -[Chrome](https://www.google.com/chrome/) and [Firefox](https://www.mozilla.org/en-US/firefox/new/) are the recommended -browsers for the best experience with the Hasura Console, including for local development. - -::: - -:::note Running Python locally - -By default, Python and your functions run in a Docker container along with your other Hasura services. The -`connector init` command created a `compose.yaml` file for you, which is used by default. - -However, if you prefer to run Python directly on your local machine, you can do so by following these steps: - -1. Ensure you have [Python](https://www.python.org/downloads/) version `>=3.11` installed on your machine. -2. Install the necessary dependencies: -```bash title="Change to the connector directory and install dependencies:" -cd my_subgraph/connector/my_python && pip3 install -r requirements.txt -``` -3. From the `my_python` directory, run this command to load environment variables from your project's `.env` file, -start the connector, and watch for any changes: - -On Mac or Linux: -```bash -watchmedo auto-restart --patterns="*.py" --recursive -- sh -c "export \$(grep -v '^#' .env.local | xargs) && python3 functions.py serve" -``` - -On Windows: -```powershell -watchmedo auto-restart --patterns="*.py" --recursive -- powershell -Command "Get-Content .env.local | ForEach-Object { if ($_ -notmatch '^#') { $var = $_.Split('='); Set-Item \"env:$($var[0])\" $var[1] } }; python3 functions.py serve" -``` - -Make sure the port specified in your connector's `.env` file doesn't conflict with any other services you may be -running. - -::: - -## What did this do? - -The commands above initialized a new Python Lambda connector, installed dependencies, and created a new function to -format a timestamp with timezone into a human-readable format. We then added this function to our metadata as a command, -and created a new build of our supergraph. - -## Next Steps - -You can also [create relationships](/getting-started/build/07-create-a-relationship.mdx) between types in your -supergraph and your commands. This enables you to pair custom business logic with β€” for example β€” database tables, and -then transform or enrich data before sending it back to your consumers. - -You can learn more about creating these and other relationships on the -[next page](/getting-started/build/07-create-a-relationship.mdx), or you can learn about -[mutating data](/getting-started/build/08-mutate-data.mdx) with examples using the TypeScript connector. Although the -examples for mutating data are provided in TypeScript, you could just as easily use the Python connector. diff --git a/docs/getting-started/build/_databaseDocs/_typescript/_06-add-business-logic.mdx b/docs/getting-started/build/_databaseDocs/_typescript/_06-add-business-logic.mdx deleted file mode 100644 index 72af36463..000000000 --- a/docs/getting-started/build/_databaseDocs/_typescript/_06-add-business-logic.mdx +++ /dev/null @@ -1,245 +0,0 @@ -import Thumbnail from "@site/src/components/Thumbnail"; - -## What's about to happen? - -With Hasura, you can integrate β€” and even host β€” business logic directly with Hasura DDN and your API.
 - -You can handle custom business logic using the Node.js Lambda connector. Using this connector, you can transform or -enrich data before it reaches your consumers, or perform any other app business logic you may need. - -You can then integrate these functions as individual [**commands**](/supergraph-modeling/commands.mdx) in your metadata -and resulting API. This process simplifies client applications and speeds up your backend development. - - - -## Steps - -:::tip Required - -- [The DDN CLI, VS Code extension, and Docker installed](/getting-started/build/00-prerequisites.mdx) -- A new or existing [project](/getting-started/deployment/01-create-a-project.mdx) -- At least one [subgraph](/getting-started/build/02-init-subgraph.mdx) -- [Node.js](https://nodejs.org/en) version `>=20.0.0` - -::: - -In this guide we will: - -- Initialize the `hasura/nodejs` data connector -- Use Node.js and npm to create a simple script -- Generate the metadata to track the function script as a command in our metadata -- Create a new API build and test it - -### Step 1. Initialize the Node.js Lambda connector - -Let's begin by initializing the connector on our project. In the example below, you'll see a familiar flow and use the -`hasura/nodejs` connector from the connector hub. - -```bash title="Run the following command:" -ddn connector init my_ts -i -``` - -- Select `hasura/nodejs` from the list of connectors. -- Choose a port (press enter to accept the default recommended by the CLI). -- In this example, we've called the connector `my_ts`. You can name it something descriptive. - -:::tip Best practices - -Importantly, a data connector can only connect to one data source. - -The project will be kept organized with each data connector's configuration located in a relevant subgraph directory. In -this example, the CLI will create a `my_subgraph/connector/my_ts` directory if it doesn't exist. You can also change -this directory by passing a `--dir` flag to the CLI. - -We recommend that the name of the connector and the directory in which the configuration is stored, `my_ts` in this -example, should match for convenience and clarity's sake. - -::: - -#### What did this do? - -This command created the following file structure in a `my_subgraph/connector/my_ts` directory, with the `functions.ts` -file being your connector's entrypoint: - -```bash -. -β”œβ”€β”€ .ddnignore -β”œβ”€β”€ .gitignore -β”œβ”€β”€ .hasura-connector -β”‚ β”œβ”€β”€ ... -β”œβ”€β”€ compose.yaml -β”œβ”€β”€ connector.yaml -#highlight-start -β”œβ”€β”€ functions.ts -#highlight-end -β”œβ”€β”€ package-lock.json -β”œβ”€β”€ package.json -└── tsconfig.json -``` - -### Step 2. Write business logic - -In this simple example, we're going to transform a timestamp with timezone (e.g. "2024-03-14T08:00:00Z") into a nicely -formatted version for humans, e.g. "8am, Thursday, March 14th, 2024." - -We'll replace the default `hello()` function in our `functions.ts` file with the following: - -```ts -/** - * @readonly - */ -export async function formatTimezoneDate(dateString: string): Promise<string> { - const date = new Date(dateString); - - const day = date.getDate(); - const nth = (d: number) => { - if (d > 3 && d < 21) return "th"; - switch (d % 10) { - case 1: - return "st"; - case 2: - return "nd"; - case 3: - return "rd"; - default: - return "th"; - } - }; - - const hours = date.getHours(); - const ampm = hours >= 12 ?
"pm" : "am"; - const hour = hours % 12 || 12; - - const dayOfWeek = date.toLocaleString("en-US", { weekday: "long" }); - const month = date.toLocaleString("en-US", { month: "long" }); - const year = date.getFullYear(); - - return `${hour}${ampm}, ${dayOfWeek}, ${month} ${day}${nth(day)}, ${year}.`; -} -``` - -As this is a Node.js project, you can install any dependency! - -### Step 3. Track the new function - -To add our function, we can use the following to generate the related metadata that will link together any functions in -this `functions.ts` file and our API. - -```bash -ddn connector introspect my_ts -``` - -Then, we can generate an `hml` file for the function using the following command. - -```bash title="Track the function:" -ddn command add my_ts formatTimezoneDate -``` - -:::info Have a lot of functions? - -If you have a lot of functions and want to add them all en masse, you can use the CLI to generate metadata for each. - -```bash title="Run the following:" -ddn command add my_ts "*" -``` - -::: - -#### What did this do? - -The command introspected your `functions.ts` file and found any functions present along with their argument and return -types. - -The CLI then used this to create Hasura metadata for each function which can then be exposed in your API. - -### Step 4. Create a new API build and test - -Next, let's create a new build of our supergraph: - -```bash -ddn supergraph build local -``` - -:::tip Start your engines! - -Want to test your supergraph? Don't forget to start your GraphQL engine using the following command: - -```bash title="Run:" -ddn run docker-start -``` - -This reads the `docker-start` script from the context config at `.hasura/context.yaml` and starts your Hasura engine, -any connectors, and observability tools. - -::: - -You should see your command available, along with its documentation, in the GraphiQL explorer which you should be able -to access using the CLI. - -```bash title="Run:" -ddn console --local -``` - -```graphql title=" You can then test your new command with the following query:" -query MyQuery { - formatTimezoneDate(dateString: "2024-03-14T08:00:00Z") -} -``` - - - -
- -:::tip Privacy settings in some browsers - -Your browser settings or privacy tools may prevent the Console from accessing your local Hasura instance. This could be -due to features designed to protect your privacy and security. Should you encounter one of these issues, we recommend -disabling these settings for the `console.hasura.io` domain. - -[Chrome](https://www.google.com/chrome/) and [Firefox](https://www.mozilla.org/en-US/firefox/new/) are the recommended -browsers for the best experience with the Hasura Console, including for local development. - -::: - -:::note Running Node.js locally - -By default, Node.js and your functions run in a Docker container along with your other Hasura services. The -`connector init` command created a `compose.yaml` file for you, which is used by default. - -However, if you prefer to run Node.js directly on your local machine, you can do so by following these steps: - -1. Ensure you have [Node.js](https://nodejs.org/en) version `>=20.0.0` installed on your machine. -2. Install the necessary dependencies: -```bash title="Change to the connector directory and install dependencies:" -cd my_subgraph/connector/my_ts && npm i -``` -3. From the `my_ts` directory, run this command to load environment variables from your project's `.env` file, start the -connector, and watch for any changes: -```bash title="Run the connector with env vars loaded from config" -ddn connector setenv --connector connector.yaml -- npm run start -``` - -Make sure the port specified in your connector's `.env` file doesn't conflict with any other services you may be -running. - -::: - -## What did this do? - -The commands above initialized a new Node.js Lambda connector, installed dependencies, and created a new function to -format a timestamp with timezone into a human-readable format. We then added this function to our metadata as a command, -and created a new build of our supergraph. - -## Next Steps - -You can also [create relationships](/getting-started/build/07-create-a-relationship.mdx) between types in your -supergraph and your commands. This enables you to pair custom business logic with β€” for example β€” database tables, and -then transform or enrich data before sending it back to your consumers. - -You can learn more about creating these and other relationships on the -[next page](/getting-started/build/07-create-a-relationship.mdx), or you can learn about -[mutating data](/getting-started/build/08-mutate-data.mdx) using the TypeScript connector. diff --git a/docs/getting-started/build/index.mdx b/docs/getting-started/build/index.mdx deleted file mode 100644 index ec1da4eff..000000000 --- a/docs/getting-started/build/index.mdx +++ /dev/null @@ -1,36 +0,0 @@ ---- -sidebar_position: 1 -sidebar_label: Overview -description: "Learn how to get started with Hasura DDN and your GraphQL API." -keywords: - - hasura ddn - - graphql api - - quickstart - - getting started - - guide ---- - -import Thumbnail from "@site/src/components/Thumbnail"; - -# Getting Started - -## Introduction - -Let's build a supergraph. - -In this guide we'll walk through using Hasura DDN. - -You're likely going to be introduced to new terminology, tools and concepts in the next few pages. Don't worry; they -will quickly become second nature to you. - -:::info Local dev, verbose CLI, and using your own data - -In this section we'll guide you through the process of building a supergraph API **locally** and then deploying it -to Hasura DDN cloud.
- -**We also recommend that you use your own data sources to effectively grok the concepts we're covering.** However, when -connecting a data source, we'll include some sample data source connection strings for you. - -::: - -Get started by [installing the prerequisites](/getting-started/build/00-prerequisites.mdx). diff --git a/docs/getting-started/collaborate/_category_.json b/docs/getting-started/collaborate/_category_.json deleted file mode 100644 index e67594c33..000000000 --- a/docs/getting-started/collaborate/_category_.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "label": "Collaborate", - "position": 11 -} diff --git a/docs/getting-started/deployment/_category_.json b/docs/getting-started/deployment/_category_.json deleted file mode 100644 index 09cd8f317..000000000 --- a/docs/getting-started/deployment/_category_.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "label": "Deploy", - "position": 10 -} diff --git a/docs/getting-started/overview.mdx b/docs/getting-started/overview.mdx index 3539f100f..27c90d0f0 100644 --- a/docs/getting-started/overview.mdx +++ b/docs/getting-started/overview.mdx @@ -14,6 +14,7 @@ hide_table_of_contents: true import { OverviewTopSectionIconNoVideo } from "@site/src/components/OverviewTopSectionIconNoVideo"; import { OverviewPlainCard } from "@site/src/components/OverviewPlainCard"; import Icon from "@site/static/icons/speedometer-04.svg"; +import Link from "@docusaurus/Link"; # Getting Started @@ -22,40 +23,35 @@ import Icon from "@site/static/icons/speedometer-04.svg"; links={[]} intro={
+

It's easy and fast to get started with Hasura DDN.

- Getting started with Hasura DDN is quick and straightforward. In just a few steps, you can connect your data - sources, define your API, and begin building your data supergraph. Hasura's powerful tools, like the CLI and - console, make it easy to create and manage your supergraph, all while ensuring seamless integration with your - data. -

-

- Whether you're using a sample project or your own data, you'll be up and running in no time. Hasura’s intuitive - setup process lets you focus on building, not on configuring, so you can quickly ship the next great update to - your product. + Use our handy dummy data sources or connect your own data. Follow our{" "} + Quickstart or select one of the guides for our main data source + types.

} /> -## Get started with Hasura DDN +## Get started with your favorite source
diff --git a/docs/getting-started/quickstart.mdx b/docs/getting-started/quickstart.mdx index 1f307794d..bf5fa2547 100644 --- a/docs/getting-started/quickstart.mdx +++ b/docs/getting-started/quickstart.mdx @@ -16,27 +16,23 @@ is_guide: true import Step from "@site/src/components/CodeStep"; import InstallTheCli from "@site/docs/_install-the-cli.mdx"; import CodeBlock from "@theme/CodeBlock"; +import SimpleVideo from "@site/src/components/SimpleVideo"; +import Admonition from '@theme/Admonition'; + # Quickstart a Supergraph API -
-
-

- In less than a minute and without needing a data source connection string, you can have a supergraph API - running locally and deployed on Hasura DDN. πŸš€ -

+
+
+ {/* If you break this onto another line it will lose its styling because the markdown rendering engine will add a <p> tag. Please leave it as a single line. Thanks! */}

In less than a minute and without needing a data source connection string, you can have a supergraph API running locally and deployed on Hasura DDN. πŸš€

-
-