diff --git a/.github/ISSUE_TEMPLATE/b-propose-new-content.yml b/.github/ISSUE_TEMPLATE/b-propose-new-content.yml new file mode 100644 index 00000000000..1dc2816d311 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/b-propose-new-content.yml @@ -0,0 +1,43 @@ +name: "Propose new content" +description: "For proposing new content, such as a new guide or a new page." +title: "[idea] " +labels: ["content","idea"] +body: +- type: markdown + attributes: + value: | + Thank you for sharing your idea for the dbt product documentation! Here are a few things to consider: + * You can submit ideas or suggest changes to our content by opening an [Issue](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose). + * Before filing an issue, please [search our current issues](https://github.com/dbt-labs/docs.getdbt.com/issues) to avoid duplicates. + * Please read the [Contributing guide](https://github.com/dbt-labs/docs.getdbt.com#contributing) if you want to open a pull request. +
- type: checkboxes + id: author_type + attributes: + label: "Which of these best describes you?" + options: + - label: "dbt Community member" + - label: "Partner" + - label: "dbt Labs employee" + - label: "Other" + validations: + required: false +
- type: textarea + id: content_idea + attributes: + label: "What's your idea for new content?" + description: | + - Give as much detail as you can to help us understand your idea. + - Why do you think this content is important? + - Who will this new content help? + validations: + required: true +
- type: textarea + id: location + attributes: + label: Where would you recommend this content live on docs.getdbt.com? + description: "Please link to the page or pages you think best fit." 
+ validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/contribute-to-developer-blog.yml b/.github/ISSUE_TEMPLATE/contribute-to-developer-blog.yml deleted file mode 100644 index 037da98dc6f..00000000000 --- a/.github/ISSUE_TEMPLATE/contribute-to-developer-blog.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: Propose a dbt Developer Blog idea -description: > - For proposing a new post on the dbt Developer Blog. -labels: ["content","developer blog"] -body: - - type: markdown - attributes: - value: | - We're excited to hear about your idea for the dbt Developer Blog. This template will help lay out the proposed post and then we will work with on next steps! - - - type: input - id: contact - attributes: - label: Contact Details - description: How can we get in touch with you? - placeholder: your preferred email and/or dbt Slack handle - validations: - required: true - - - type: checkboxes - id: read-contribution - attributes: - label: I have read the dbt Developer Blog contribution guidelines. - description: You can find the contribution guide [here](https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/developer-blog.md) - options: - - label: I have read the dbt Developer Blog contribution guidelines. - validations: - required: true - - - type: checkboxes - id: author_type - attributes: - label: Which of these best describes you? - options: - - label: I am a dbt Community member or partner contributing to the Developer Blog - - label: I work for dbt Labs and am creating this issue for a community or marketing approved piece. - validations: - - - - type: textarea - attributes: - label: > - What is the topic of your post? - description: | - Please provide a short (~ 1 paragraph) summary as well as who this post is targeted towards (ie people interested in learning more about dbt snapshots or advanced Jinja users) - validations: - required: true - - - type: textarea - attributes: - label: Link to an initial outline. 
- description: Please link to a short outline in Notion, or Google Docs - validations: - required: true diff --git a/.github/ISSUE_TEMPLATE/external-core-team.yml b/.github/ISSUE_TEMPLATE/external-core-team.yml index 51460952c6c..7e90a2840d0 100644 --- a/.github/ISSUE_TEMPLATE/external-core-team.yml +++ b/.github/ISSUE_TEMPLATE/external-core-team.yml @@ -1,5 +1,5 @@ name: dbt Core - Request changes to docs -description: File a docs update request that is not already tracked in Orch team's Release Plans (Notion database). +description: "File a docs update or feature request related to dbt Core content." title: "[Core] <title>" labels: ["content","dbt Core"] body: diff --git a/website/docs/docs/build/data-tests.md b/website/docs/docs/build/data-tests.md index 0806842a85c..0261ea2d8b2 100644 --- a/website/docs/docs/build/data-tests.md +++ b/website/docs/docs/build/data-tests.md @@ -13,6 +13,16 @@ keywords: * [Data test configurations](/reference/data-test-configs) * [Test selection examples](/reference/node-selection/test-selection-examples) +<VersionBlock firstVersion="1.8"> + +:::important + +In dbt v1.8, what was previously known as "tests" is now called "data tests" with the addition of [unit tests](/docs/build/unit-tests). The YAML key `tests:` is still supported as an alias for data tests but will be deprecated in the future in favor of `data_tests:`. Refer to [New syntax](#new-data_tests-syntax) for more information. + +::: + +</VersionBlock> + ## Overview Data tests are assertions you make about your models and other resources in your dbt project (e.g. sources, seeds and snapshots). When you run `dbt test`, dbt will tell you if each test in your project passes or fails. @@ -251,6 +261,28 @@ Note that, if you select to store test failures: * Test result tables are created in a schema suffixed or named `dbt_test__audit`, by default. It is possible to change this value by setting a `schema` config. 
(For more details on schema naming, see [using custom schemas](/docs/build/custom-schemas).) - A test's results will always **replace** previous failures for the same test. +<VersionBlock firstVersion="1.8" lastVersion="1.8"> + +## New `data_tests:` syntax + +Data tests were historically called "tests" in dbt as the only form of testing available. With the introduction of unit tests in v1.8, it was necessary to update our naming conventions and syntax. As of v1.8, `tests:` is still supported in your YML configuration file as an alias but will be deprecated in the future in favor of `data_tests:`. + +As we progress towards this deprecation, the examples in our docs pages will be updated to reflect this new syntax, but we highly recommend you begin the migration process as soon as you upgrade to v1.8 to avoid interruptions or issues in the future. + +```yml + +models: + - name: orders + columns: + - name: order_id + data_tests: + - unique + - not_null + + +``` +</VersionBlock> + ## FAQs <FAQ path="Tests/test-one-model" /> diff --git a/website/docs/docs/build/saved-queries.md b/website/docs/docs/build/saved-queries.md index 0c3d8a7fc1c..6cfe1315782 100644 --- a/website/docs/docs/build/saved-queries.md +++ b/website/docs/docs/build/saved-queries.md @@ -22,21 +22,33 @@ Saved queries are distinct from [exports](/docs/use-dbt-semantic-layer/exports), All metrics in a saved query need to use the same dimensions in the `group_by` or `where` clauses. The following is an example of a saved query: +<!-- For versions 1.8 and higher --> +<VersionBlock firstVersion="1.8"> + <File name='semantic_model.yml'> ```yaml saved_queries: - - name: p0_booking - description: Booking-related metrics that are of the highest priority. 
+ - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test saved query + config: + cache: + enabled: true # Or false if you want it disabled by default query_params: - metrics: - - bookings - - instant_bookings - group_by: - - TimeDimension('metric_time', 'day') - - Dimension('listing__capacity_latest') - where: - - "{{ Dimension('listing__capacity_latest') }} > 3" + metrics: + - simple_metric + group_by: + - "Dimension('user__ds')" + where: + - "{{ Dimension('user__ds', 'DAY') }} <= now()" + - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export + config: + alias: my_export_alias + export_as: table + schema: my_export_schema_name ``` </File> @@ -48,6 +60,9 @@ To define a saved query, refer to the following parameters: |-------|---------|----------|----------------| | `name` | String | Required | Name of the saved query object. | | `description` | String | Required | A description of the saved query. | +| `label` | String | Required | The display name for your saved query. This value will be shown in downstream tools. | +| `config` | String | Required | A config section for any parameters specifying the saved query. | +| `config::cache` | String | Optional | A boolean to specify if a saved query should be used to populate the cache. Accepts `true` or `false`. Defaults to `false` | | `query_params` | Structure | Required | Contains the query parameters. | | `query_params::metrics` | List or String | Optional | A list of the metrics to be used in the query as specified in the command line interface. | | `query_params::group_by` | List or String | Optional | A list of the Entities and Dimensions to be used in the query, which include the `Dimension` or `TimeDimension`. | @@ -57,10 +72,60 @@ To define a saved query, refer to the following parameters: | `exports::config` | List or Structure | Required | A config section for any parameters specifying the export. 
| | `exports::config::export_as` | String | Required | The type of export to run. Options include table or view currently and cache in the near future. | | `exports::config::schema` | String | Optional | The schema for creating the table or view. This option cannot be used for caching. | -| `exports::config::alias` | String | Optional | The table alias to use to write the table or view. This option cannot be used for caching. | +| `exports::config::alias` | String | Optional | The table alias used to write to the table or view. This option cannot be used for caching. | -All metrics in a saved query need to use the same dimensions in the `group_by` or `where` clauses. +</VersionBlock> + +<!-- For versions 1.7 and lower--> +<VersionBlock lastVersion="1.7"> + +<File name='semantic_model.yml'> + +```yaml +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test saved query + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('user__ds')" + where: + - "{{ Dimension('user__ds', 'DAY') }} <= now()" + - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export + config: + alias: my_export_alias + export_as: table + schema: my_export_schema_name +``` +</File> + +## Parameters + +To define a saved query, refer to the following parameters: +| Parameter | Type | Required | Description | +|-------|---------|----------|----------------| +| `name` | String | Required | Name of the saved query object. | +| `description` | String | Required | A description of the saved query. | +| `label` | String | Required | The display name for your saved query. This value will be shown in downstream tools. | +| `query_params` | Structure | Required | Contains the query parameters. | +| `query_params::metrics` | List or String | Optional | A list of the metrics to be used in the query as specified in the command line interface. 
| +| `query_params::group_by` | List or String | Optional | A list of the Entities and Dimensions to be used in the query, which include the `Dimension` or `TimeDimension`. | +| `query_params::where` | List or String | Optional | A list of strings that may include the `Dimension` or `TimeDimension` objects. | +| `exports` | List or Structure | Optional | A list of exports to be specified within the exports structure. | +| `exports::name` | String | Required | Name of the export object. | +| `exports::config` | List or Structure | Required | A config section for any parameters specifying the export. | +| `exports::config::export_as` | String | Required | The type of export to run. Options include table or view currently and cache in the near future. | +| `exports::config::schema` | String | Optional | The schema for creating the table or view. This option cannot be used for caching. | +| `exports::config::alias` | String | Optional | The table alias used to write to the table or view. This option cannot be used for caching. | + +</VersionBlock> + +All metrics in a saved query need to use the same dimensions in the `group_by` or `where` clauses. ## Related docs diff --git a/website/docs/docs/core/about-core-setup.md b/website/docs/docs/core/about-core-setup.md index 8b170ba70d4..16bfe18fc37 100644 --- a/website/docs/docs/core/about-core-setup.md +++ b/website/docs/docs/core/about-core-setup.md @@ -14,7 +14,9 @@ dbt Core is an [open-source](https://github.com/dbt-labs/dbt-core) tool that ena - [Connecting to a data platform](/docs/core/connect-data-platform/profiles.yml) - [How to run your dbt projects](/docs/running-a-dbt-project/run-your-dbt-projects) -To learn about developing dbt projects in dbt Cloud, refer to [Develop with dbt Cloud](/docs/cloud/about-develop-dbt). - - dbt Cloud provides a command line interface with the [dbt Cloud CLI](/docs/cloud/cloud-cli-installation). Both dbt Core and the dbt Cloud CLI are command line tools that let you run dbt commands. 
The key distinction is the dbt Cloud CLI is tailored for dbt Cloud's infrastructure and integrates with all its [features](/docs/cloud/about-cloud/dbt-cloud-features). +If you're interested in using a command line interface to [develop dbt projects in dbt Cloud](/docs/cloud/about-develop-dbt), the [dbt Cloud CLI](/docs/cloud/cloud-cli-installation) lets you run dbt commands locally. The dbt Cloud CLI is tailored for dbt Cloud's infrastructure and integrates with all its [features](/docs/cloud/about-cloud/dbt-cloud-features). If you need a more detailed first-time setup guide for specific data platforms, read our [quickstart guides](https://docs.getdbt.com/guides). + +## Related docs +- [Move from dbt Core to dbt Cloud](/guides/core-to-cloud-1?step=1) diff --git a/website/docs/docs/core/installation-overview.md b/website/docs/docs/core/installation-overview.md index 8c139012667..2e112066048 100644 --- a/website/docs/docs/core/installation-overview.md +++ b/website/docs/docs/core/installation-overview.md @@ -50,3 +50,6 @@ Most command-line tools, including dbt, have a `--help` flag that you can use to — `dbt run --help`: Lists the flags available for the `run` command ::: + +## Related docs +- [Move from dbt Core to dbt Cloud](/guides/core-to-cloud-1?step=1) diff --git a/website/docs/docs/deploy/deploy-jobs.md b/website/docs/docs/deploy/deploy-jobs.md index a0c04b9a6b8..ee703895f7a 100644 --- a/website/docs/docs/deploy/deploy-jobs.md +++ b/website/docs/docs/deploy/deploy-jobs.md @@ -105,7 +105,7 @@ Here are examples of cron job schedules. The dbt Cloud job scheduler supports us - `0 7 L * 5`: At 07:00 AM, on the last day of the month, and on Friday. - `30 14 L * *`: At 02:30 PM, on the last day of the month. 
-### Trigger on job completion <Lifecycle status="team,enterprise" /> +### Trigger on job completion <Lifecycle status="team,enterprise" /> To _chain_ deploy jobs together, enable the **Run when another job finishes** option and specify the upstream (parent) job that, when it completes, will trigger your job. You can also use the [Create Job API](/dbt-cloud/api-v2#/operations/Create%20Job) to do this. diff --git a/website/docs/guides/core-to-cloud-1.md b/website/docs/guides/core-to-cloud-1.md new file mode 100644 index 00000000000..b1737cb14f4 --- /dev/null +++ b/website/docs/guides/core-to-cloud-1.md @@ -0,0 +1,255 @@ +--- +title: 'Move from dbt Core to dbt Cloud: Get started' +id: core-to-cloud-1 +description: "Learn how to move from dbt Core to dbt Cloud and what you need to get started." +hoverSnippet: "Learn how to move from dbt Core to dbt Cloud." +icon: 'guides' +time_to_complete: 'Total estimated time: 3-4 hours' +hide_table_of_contents: true +tags: ['Migration','dbt Core','dbt Cloud'] +level: 'Intermediate' +recently_updated: true +--- +## Introduction + +Moving from dbt Core to dbt Cloud streamlines analytics engineering workflows by allowing teams to develop, test, deploy, and explore data products using a single, fully managed software service. + +dbt Cloud is the fastest and most reliable way to deploy dbt. It enables you to develop, test, deploy, and explore data products using a single, fully managed service. 
It also supports: +- The [dbt Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) or command line with [dbt Cloud CLI](/docs/cloud/cloud-cli-installation) for development +- [dbt Semantic Layer](/docs/use-dbt-semantic-layer/dbt-sl) for consistent metrics +- Domain ownership of data with multi-project [dbt Mesh](/best-practices/how-we-mesh/mesh-1-intro) setups +- [dbt Explorer](/docs/collaborate/explore-projects) for easier data discovery and understanding + +Learn more about [dbt Cloud features](/docs/cloud/about-cloud/dbt-cloud-features). + +dbt Core is an open-source tool that enables data teams to transform data following analytics engineering best practices using a command line interface. It must be self-hosted and maintained. + +### What you'll learn + +This guide outlines the steps you need to take to move from dbt Core to dbt Cloud and highlights the necessary technical changes: + +- [Account setup](https://docs.getdbt.com/guides/core-to-cloud-1?step=3#account-setup): Learn how to create a dbt Cloud account, invite team members, and configure it for your team. +- [Data platform setup](https://docs.getdbt.com/guides/core-to-cloud-1?step=4#data-platform-setup): Find out about connecting your data platform to dbt Cloud. +- [Git setup](https://docs.getdbt.com/guides/core-to-cloud-1?step=5#git-setup): Learn to link your dbt project's Git repository with dbt Cloud. +- [Developer setup:](https://docs.getdbt.com/guides/core-to-cloud-1?step=6#developer-setup) Understand the setup needed for developing in dbt Cloud. +- [Environment variables](https://docs.getdbt.com/guides/core-to-cloud-1?step=7#environment-variables): Discover how to manage environment variables in dbt Cloud, including their priority. +- [Orchestration setup](https://docs.getdbt.com/guides/core-to-cloud-1?step=8#orchestration-setup): Learn how to prepare your dbt Cloud environment and jobs for orchestration. 
+- [Models configuration](https://docs.getdbt.com/guides/core-to-cloud-1?step=9#models-configuration): Get insights on validating and running your models in dbt Cloud, using either the dbt Cloud IDE or dbt Cloud CLI. +- [What's next?](https://docs.getdbt.com/guides/core-to-cloud-1?step=10#whats-next): Summarizes key takeaways and introduces what to expect in the following guides. + +## Prerequisites + +- You have an existing dbt Core project connected to a Git repository and data platform supported in [dbt Cloud](/docs/cloud/connect-data-platform/about-connections). +- You are using dbt version 1.0 or later. +- You have a dbt Cloud account. **[Don't have one? Start your free trial today](https://www.getdbt.com/signup)**! + +### Related docs +- [Learn dbt Cloud](https://courses.getdbt.com/collections) +- [Develop with dbt Cloud](/docs/cloud/about-develop-dbt) +- [Deploy jobs](/docs/deploy/deployments) +- Book [expert-led demos](https://www.getdbt.com/resources/dbt-cloud-demos-with-experts) and insights +- Work with the [dbt Labs’ Professional Services](https://www.getdbt.com/dbt-labs/services) team to support your data organization and migration. + +## Account setup + +This section outlines the steps to set up your dbt Cloud account and configure it for your team. + +1. [Create your dbt Cloud account](https://www.getdbt.com/signup). + +2. Provide user [access](/docs/cloud/manage-access/about-user-access) and [invite users](/docs/cloud/manage-access/about-user-access) to your dbt Cloud account and project. + +3. Configure [Single Sign-On (SSO)](/docs/cloud/manage-access/sso-overview) or [Role-based access control (RBAC)](/docs/cloud/manage-access/about-user-access#role-based-access-control) for easy and secure access. <Lifecycle status='enterprise' /> + - This removes the need to save passwords and secret environment variables locally. + +4. 
In **Account settings**, enable [partial parsing](/docs/deploy/deploy-environments#partial-parsing) to only reparse changed files, saving time. + +5. In **Account settings**, enable [Git repo caching](/docs/deploy/deploy-environments#git-repository-caching) for job reliability & third-party outage protection. <Lifecycle status='enterprise' /> + +## Data platform setup + +This section outlines the considerations and methods to connect your data platform to dbt Cloud. + +1. In dbt Cloud, set up your [data platform connections](/docs/cloud/connect-data-platform/about-connections) and [environment variables](/docs/build/environment-variables). dbt Cloud can connect with a variety of data platform providers including: + - [AlloyDB](/docs/cloud/connect-data-platform/connect-redshift-postgresql-alloydb) + - [Amazon Redshift](/docs/cloud/connect-data-platform/connect-redshift-postgresql-alloydb) + - [Apache Spark](/docs/cloud/connect-data-platform/connect-apache-spark) + - [Databricks](/docs/cloud/connect-data-platform/connect-databricks) + - [Google BigQuery](/docs/cloud/connect-data-platform/connect-bigquery) + - [Microsoft Fabric](/docs/cloud/connect-data-platform/connect-microsoft-fabric) + - [PostgreSQL](/docs/cloud/connect-data-platform/connect-redshift-postgresql-alloydb) + - [Snowflake](/docs/cloud/connect-data-platform/connect-snowflake) + - [Starburst or Trino](/docs/cloud/connect-data-platform/connect-starburst-trino) + +2. You can verify your data platform connections by clicking the **Test connection** button in your deployment and development credentials settings. + +### Additional configuration + +Explore these additional configurations to optimize your data platform setup further: + +1. Use [OAuth connections](/docs/cloud/manage-access/set-up-snowflake-oauth), which enables secure authentication using your data platform’s SSO. <Lifecycle status='enterprise' /> + +## Git setup + +Your existing dbt project source code should live in a Git repository. 
In this section, you will connect your existing dbt project source code from Git to dbt Cloud. + +1. Ensure your dbt project is in a Git repository. + +2. In **Account settings**, select **Integrations**, and [connect and configure Git](/docs/cloud/git/git-configuration-in-dbt-cloud) in dbt Cloud to connect your Git repository: + - Connect with one of the [native integrations](/docs/cloud/git/git-configuration-in-dbt-cloud) in dbt Cloud (such as GitHub, GitLab, and Azure DevOps). + - [Import a Git repository](/docs/cloud/git/import-a-project-by-git-url) from any valid Git URL that points to a dbt project. + +### Additional configuration +Explore these additional configurations to optimize your Git setup further: + +1. Log into dbt Cloud using [OAuth connections](/docs/cloud/git/connect-github) to integrate with your source code platform. It automatically links to the repository using one of the native integrations set at the account level. <Lifecycle status='enterprise' /> + + Set up groups for dbt project access with those configured for repository access to streamline permissions. + +## Developer setup + +This section highlights the development configurations you’ll need for your dbt Cloud project. The following categories are covered in this section: + +- [dbt Cloud environments](/guides/core-to-cloud-1?step=7#dbt-cloud-environments) +- [Initial setup steps](/guides/core-to-cloud-1?step=7#initial-setup-steps) +- [Additional configuration](/guides/core-to-cloud-1?step=7#additional-configuration-2) +- [dbt Cloud commands](/guides/core-to-cloud-1?step=7#dbt-cloud-commands) + +### dbt Cloud environments + +The concept of an [environment](/docs/environments-in-dbt) in dbt Cloud is the same as a `target` in dbt Core. + +The primary difference between a dbt Cloud environment and a `target` in dbt Core is that you can make these configurations through the dbt Cloud UI instead of within the `profiles.yml` file. 
+ +This difference streamlines the process of switching between development, staging, and production contexts, removing the need to manually edit the `profiles.yml` file. dbt Cloud environments also integrate with additional features such as job scheduling, version control, and more — making it easier to manage the full lifecycle of your dbt projects within a single platform. You can [set up](/reference/dbt-jinja-functions/target) or [customize](/docs/build/custom-target-names) target names in dbt Cloud. + +### Initial setup steps +1. **Set up development environment** — Set up your [development](/docs/dbt-cloud-environments#create-a-development-environment) environment and [development credentials](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud#access-the-cloud-ide). You’ll need this to access your dbt project and start developing. + +2. **dbt Core version** — In your dbt Cloud environment and credentials, use the same dbt Core version you use locally. You can run `dbt --version` in the command line to find out which version of dbt Core you’re using. + - Once your full migration is complete, consider upgrading your environments to [Keep on latest version](/docs/dbt-versions/upgrade-dbt-version-in-cloud#keep-on-latest-version-) to always get the latest features and more. + +3. **Connect to your data platform** — When using dbt Cloud, you can [connect to your data platform](/docs/cloud/connect-data-platform/about-connections) directly in the UI. + - Each environment is roughly equivalent to an entry in your `profiles.yml` file. This means you don't need a `profiles.yml` file in your project. +4. **Development tools** — Set up your development workspace with the [dbt Cloud CLI](/docs/cloud/cloud-cli-installation) or [dbt Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) to edit and develop your dbt code in your tool of choice. + +### Additional configuration +Explore these additional configurations to optimize your developer setup further: +1. 
**Custom target names** — If you’re using a [`custom target.name`](/reference/dbt-jinja-functions/target) in your project, we recommend you set them using [environment variables](/docs/build/environment-variables). Alternatively, you can update it at the developer credentials level. + +### dbt Cloud commands +1. Review the [dbt commands](/reference/dbt-commands) supported for dbt Cloud development. For example, `dbt debug` isn’t needed in dbt Cloud since the UI displays logs for your viewing. + +## Environment variables +This section will help you understand how to set up and manage dbt Cloud environment variables for your project. The following categories are covered: +- [Environment variables in dbt Cloud](/guides/core-to-cloud-1?step=7#environment-variables-in-dbt-cloud) +- [dbt Cloud environment variables order of precedence](/guides/core-to-cloud-1?step=7#dbt-cloud-environment-variables-order-of-precedence) +- [Set environment variables in dbt Cloud](/guides/core-to-cloud-1?step=7#set-environment-variables-in-dbt-cloud) + +In dbt Core, environment variables, or the [`env_var` function](/reference/dbt-jinja-functions/env_var), are defined in the `profiles.yml` file. +In dbt Cloud, you can set [environment variables](/docs/build/environment-variables) in the dbt Cloud user interface (UI). Read [Set up environment variables](#set-environment-variables-in-dbt-cloud) for more info. + +### Environment variables in dbt Cloud + - dbt Cloud environment variables must be prefixed with `DBT_` (including `DBT_ENV_CUSTOM_ENV_` or `DBT_ENV_SECRET_`). + - If your dbt Core environment variables don’t follow this naming convention, perform a [“find and replace”](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud#dbt-cloud-ide-features) in your project to make sure all references to these environment variables contain the proper naming conventions. 
+- dbt Cloud secures environment variables, offering additional measures for sensitive values, such as prefixing keys with `DBT_ENV_SECRET_` to obscure them in logs and the UI. + +<Lightbox src="/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/project-environment-view.png" title="Setting project level and environment level values"/> + +### dbt Cloud environment variables order of precedence +Environment variables in dbt Cloud are managed with a clear [order of precedence](/docs/build/environment-variables#setting-and-overriding-environment-variables), allowing users to define values at four levels (highest to lowest order of precedence): + - The job level (job override) or in the IDE for an individual developer (personal override). _Highest precedence_ + - The environment level, which can be overridden by the job level or personal override. + - A project-wide default value, which can be overridden by the environment level, job level, or personal override. + - The optional default argument supplied to the `env_var` Jinja function in the code. _Lowest precedence_ + +<Lightbox src="/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/env-var-precdence.png" title="Environment variables order of precedence"/> + +### Set environment variables in dbt Cloud + +- To set these variables for an entire project or specific environments, navigate to **Deploy** > **Environments** > **Environment variables** tab. +- To set these variables at the job level, navigate to **Deploy** > **Jobs** > **Select your job** > **Settings** > **Advanced settings**. +- To set these variables at the personal override level, navigate to **Profile Settings** > **Credentials** > **Select your project** > **Environment variables**. + +## Orchestration setup + +This section outlines the considerations and methods to set up your dbt Cloud environments and jobs for orchestration. 
The following categories are covered in this section: + +- [dbt Cloud environments](/guides/core-to-cloud-1?step=8#dbt-cloud-environments-1) +- [Initial setup steps](/guides/core-to-cloud-1?step=8#initial-setup-steps-1) +- [Additional configuration](/guides/core-to-cloud-1?step=8#additional-configuration-3) +- [CI/CD setup](/guides/core-to-cloud-1?step=8#cicd-setup) + +### dbt Cloud environments +To use the [dbt Cloud's job scheduler](/docs/deploy/job-scheduler), set up one environment as the production environment. This is the [deployment](/docs/deploy/deploy-environments) environment. You can set up multiple environments for different stages of your deployment pipeline, such as development, staging/QA, and production. + +### Initial setup steps +1. **dbt Core version** — In your environment settings, configure dbt Cloud with the same dbt Core version. + - Once your full migration is complete, we recommend upgrading your environments to [Keep on latest version](/docs/dbt-versions/upgrade-dbt-version-in-cloud#keep-on-latest-version-) to always get the latest features and more. + +2. **Configure your jobs** — [Create jobs](/docs/deploy/deploy-jobs#create-and-schedule-jobs) for automated or event-driven dbt jobs. You can use cron execution, manual, pull requests, or API triggers. + - Note that alongside [jobs in dbt Cloud](/docs/deploy/jobs), discover other ways to schedule and run your dbt jobs with the help of other tools. Refer to [Integrate with other tools](/docs/deploy/deployment-tools) for more information. + +### Additional configuration +Explore these additional configurations to optimize your dbt Cloud orchestration setup further: + +1. **Custom target names** — Set a `custom target.name` for every single [corresponding dbt Cloud job](/docs/build/custom-target-names). We recommend modifying the code to use [environment variables](/docs/build/environment-variables) instead since those can be set at the environment level. + +2. 
**dbt commands** — Add any relevant [dbt commands](/docs/deploy/job-commands) to execute your dbt Cloud jobs runs. + +3. **Notifications** — Set up [notifications](/docs/deploy/job-notifications) by configuring email and Slack alerts to monitor your jobs. + +4. **Monitoring tools** — Use [monitoring tools](/docs/deploy/monitor-jobs) like run history, job retries, job chaining, dashboard status tiles, and more for a seamless orchestration experience. + +5. **API access** — Create [API auth tokens](/docs/dbt-cloud-apis/authentication) and access to [dbt Cloud APIs](/docs/dbt-cloud-apis/overview) as needed. <Lifecycle status="team,enterprise" /> + +6. **dbt Explorer** — If you use [dbt Explorer](/docs/collaborate/explore-projects) and run production jobs with an external orchestrator, ensure your production jobs run `dbt run` or `dbt build` to update and view resources and its metadata in dbt Explorer. Running `dbt compile` will not update resources and its metadata. <Lifecycle status="team,enterprise" /> + +### CI/CD setup + +Building a custom solution to efficiently check code upon pull requests is complicated. With dbt Cloud, you can enable [continuous integration / continuous deployment (CI/CD)](/docs/deploy/continuous-integration) and configure dbt Cloud to run your dbt projects in a temporary schema when new commits are pushed to open pull requests. + +This build-on-PR functionality is a great way to catch bugs before deploying to production, and an essential tool for data practitioners. + +1. Set up an integration with a native Git application (such as Azure DevOps, GitHub, GitLab) and a CI environment in dbt Cloud. +2. Create [a CI/CD job](/docs/deploy/ci-jobs) to optimize workflows. +3. Run your jobs in a production environment to fully implement CI/CD. Future pull requests will also leverage the last production runs to compare against. 
+ +## Models configuration + +In this section, you’ll be able to validate whether your models run or compile correctly in your development tool of choice: The [dbt Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) or [dbt Cloud CLI](/docs/cloud/cloud-cli-installation). + +You’ll want to make sure you set up your [development environment and credentials](/docs/dbt-cloud-environments#set-developer-credentials). + +1. In your [development tool](/docs/cloud/about-develop-dbt) of choice, you can review your dbt project and ensure your project is set up correctly and you’re able to run commands. This will: + - Make sure your project compiles correctly. + - Run a few models in the dbt Cloud IDE or dbt Cloud CLI to ensure you’re experiencing accurate results in development. + +2. Once your first job has successfully run in your production environment, use [dbt Explorer](/docs/collaborate/explore-projects) to view your project's [resources](/docs/build/projects) (such as models, tests, and metrics) and their <Term id="data-lineage" />  to gain a better understanding of its latest production state. <Lifecycle status="team,enterprise" /> + +## What’s next? + +<ConfettiTrigger> + +Congratulations on completing the first part of your move to dbt Cloud 🎉! + +You have learned: +- How to set up your dbt Cloud account +- How to connect your data platform and Git repository +- How to configure your development, orchestration, and CI/CD environments +You’ve set up your models and are ready to run your first job in dbt Cloud. + +For next steps, we'll soon share other guides on how to manage your move and tips/faqs. Stay tuned! + +<!-- +- Link to the next guide (managing your migration or move, etc.) +- Link to tips and faqs? 
+--> + +### Related docs +- [Learn dbt Cloud](https://courses.getdbt.com/collections) +- [Develop with dbt Cloud](/docs/cloud/about-develop-dbt) +- [Deploy jobs](/docs/deploy/deployments) +- Book [expert-led demos](https://www.getdbt.com/resources/dbt-cloud-demos-with-experts) and insights +- Work with the [dbt Labs’ Professional Services](https://www.getdbt.com/dbt-labs/services) team to support your data organization and migration. + +</ConfettiTrigger> diff --git a/website/docs/guides/manual-install-qs.md b/website/docs/guides/manual-install-qs.md index fcd1e5e9599..b433649299f 100644 --- a/website/docs/guides/manual-install-qs.md +++ b/website/docs/guides/manual-install-qs.md @@ -10,7 +10,9 @@ hide_table_of_contents: true --- ## Introduction -When you use dbt Core to work with dbt, you will be editing files locally using a code editor, and running projects using a command line interface (CLI). If you'd rather edit files and run projects using the web-based Integrated Development Environment (IDE), you should refer to the [dbt Cloud quickstarts](/guides). You can also develop and run dbt commands using the [dbt Cloud CLI](/docs/cloud/cloud-cli-installation) — a dbt Cloud powered command line. +When you use dbt Core to work with dbt, you will be editing files locally using a code editor, and running projects using a command line interface (CLI). + +If you want to edit files and run projects using the web-based dbt Integrated Development Environment (IDE), refer to the [dbt Cloud quickstarts](/guides). You can also develop and run dbt commands using the [dbt Cloud CLI](/docs/cloud/cloud-cli-installation) — a dbt Cloud powered command line. ### Prerequisites @@ -23,6 +25,9 @@ When you use dbt Core to work with dbt, you will be editing files locally using After setting up BigQuery to work with dbt, you are ready to create a starter project with example models, before building your own models. 
+### Related docs +- [Move from dbt Core to dbt Cloud](/guides/core-to-cloud-1?step=1) + ## Create a repository The following steps use [GitHub](https://github.com/) as the Git provider for this guide, but you can use any Git provider. You should have already [created a GitHub account](https://github.com/join). diff --git a/website/docs/guides/zapier-new-cloud-job.md b/website/docs/guides/zapier-new-cloud-job.md deleted file mode 100644 index b16fa94bc21..00000000000 --- a/website/docs/guides/zapier-new-cloud-job.md +++ /dev/null @@ -1,98 +0,0 @@ ---- -title: "Trigger a dbt Cloud job after a run finishes" -id: zapier-new-cloud-job -description: Use Zapier to trigger a dbt Cloud job once a run completes. -hoverSnippet: Learn how to use Zapier to trigger a dbt Cloud job once a run completes. -# time_to_complete: '30 minutes' commenting out until we test -icon: 'guides' -hide_table_of_contents: true -tags: ['Webhooks'] -level: 'Advanced' -recently_updated: true ---- - -## Introduction - -This guide will show you how to trigger a dbt Cloud job based on the successful completion of a different job. This can be useful when you need to trigger a job in a different project. Remember that dbt works best when it understands the whole context of the <Term id="dag"/> it has been asked to run, so use this ability judiciously. - -### Prerequisites - -In order to set up the integration, you should have familiarity with: -- [dbt Cloud Webhooks](/docs/deploy/webhooks) -- Zapier - -## Create a new Zap in Zapier -Use **Webhooks by Zapier** as the Trigger, and **Catch Raw Hook** as the Event. If you don't intend to [validate the authenticity of your webhook](/docs/deploy/webhooks#validate-a-webhook) (not recommended!) then you can choose **Catch Hook** instead. - -Press **Continue**, then copy the webhook URL. 
- -![Screenshot of the Zapier UI, showing the webhook URL ready to be copied](/img/guides/orchestration/webhooks/zapier-common/catch-raw-hook.png) - -## Configure a new webhook in dbt Cloud -See [Create a webhook subscription](/docs/deploy/webhooks#create-a-webhook-subscription) for full instructions. Your event should be **Run completed**, and you need to change the **Jobs** list to only contain the job you want to trigger the next run. - -Make note of the Webhook Secret Key for later. - -Once you've tested the endpoint in dbt Cloud, go back to Zapier and click **Test Trigger**, which will create a sample webhook body based on the test event dbt Cloud sent. - -The sample body's values are hard-coded and not reflective of your project, but they give Zapier a correctly-shaped object during development. - -## Store secrets -In the next step, you will need the Webhook Secret Key from the prior step, and a dbt Cloud [user token](https://docs.getdbt.com/docs/dbt-cloud-apis/user-tokens) or [service account token](https://docs.getdbt.com/docs/dbt-cloud-apis/service-tokens). - -Zapier allows you to [store secrets](https://help.zapier.com/hc/en-us/articles/8496293271053-Save-and-retrieve-data-from-Zaps), which prevents your keys from being displayed in plaintext in the Zap code. You will be able to access them via the [StoreClient utility](https://help.zapier.com/hc/en-us/articles/8496293969549-Store-data-from-code-steps-with-StoreClient). - -<Snippet path="webhook_guide_zapier_secret_store" /> - -## Add a code action -Select **Code by Zapier** as the App, and **Run Python** as the Event. - -In the **Set up action** area, add two items to **Input Data**: `raw_body` and `auth_header`. Map those to the `1. Raw Body` and `1. Headers Http Authorization` fields from the **Catch Raw Hook** step above. 
- -![Screenshot of the Zapier UI, showing the mappings of raw_body and auth_header](/img/guides/orchestration/webhooks/zapier-common/run-python.png) - -In the **Code** field, paste the following code, replacing `YOUR_SECRET_HERE` with the secret you created when setting up the Storage by Zapier integration. Remember that this is not your dbt Cloud secret. - -The code below will validate the authenticity of the request, then send a [`trigger run` command to the dbt Cloud API](https://docs.getdbt.com/dbt-cloud/api-v2-legacy#tag/Jobs/operation/triggerRun) for the given job ID. - -```python -import hashlib -import hmac -import json - -#replace with the Job ID you want to run -target_job_id = 12345 - -auth_header = input_data['auth_header'] -raw_body = input_data['raw_body'] - -# Access secret credentials -secret_store = StoreClient('YOUR_SECRET_HERE') -hook_secret = secret_store.get('DBT_WEBHOOK_KEY') -api_token = secret_store.get('DBT_CLOUD_SERVICE_TOKEN') - -# Validate the webhook came from dbt Cloud -signature = hmac.new(hook_secret.encode('utf-8'), raw_body.encode('utf-8'), hashlib.sha256).hexdigest() - -if signature != auth_header: - raise Exception("Calculated signature doesn't match contents of the Authorization header. This webhook may not have been sent from dbt Cloud.") - -full_body = json.loads(raw_body) -hook_data = full_body['data'] - -if hook_data['runStatus'] == "Success": - - # Trigger a new run with the dbt Cloud Admin API - url = f'https://cloud.getdbt.com/api/v2/accounts/{full_body['accountId']}/jobs/{target_job_id}/run' - - body = {'cause': f"Triggered by Zapier because {hook_data['jobName']} Run #{hook_data['runId']} completed successfully"} - headers = {'Authorization': f'Token {api_token}'} - response = requests.post(url, json=body, headers=headers) - response.raise_for_status() - -return -``` - -## Test and deploy - -When you're happy with it, remember to ensure that your `account_id` is no longer hardcoded, then publish your Zap. 
diff --git a/website/package-lock.json b/website/package-lock.json index 282056e5922..864954c3e8d 100644 --- a/website/package-lock.json +++ b/website/package-lock.json @@ -16,6 +16,7 @@ "@stoplight/elements": "^7.7.17", "@svgr/webpack": "^6.0.0", "axios": "^0.27.2", + "canvas-confetti": "^1.9.2", "classnames": "^2.3.1", "clsx": "^1.1.1", "color": "^3.1.2", @@ -8780,6 +8781,15 @@ } ] }, + "node_modules/canvas-confetti": { + "version": "1.9.2", + "resolved": "https://registry.npmjs.org/canvas-confetti/-/canvas-confetti-1.9.2.tgz", + "integrity": "sha512-6Xi7aHHzKwxZsem4mCKoqP6YwUG3HamaHHAlz1hTNQPCqXhARFpSXnkC9TWlahHY5CG6hSL5XexNjxK8irVErg==", + "funding": { + "type": "donate", + "url": "https://www.paypal.me/kirilvatev" + } + }, "node_modules/caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", @@ -32098,6 +32108,11 @@ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001470.tgz", "integrity": "sha512-065uNwY6QtHCBOExzbV6m236DDhYCCtPmQUCoQtwkVqzud8v5QPidoMr6CoMkC2nfp6nksjttqWQRRh75LqUmA==" }, + "canvas-confetti": { + "version": "1.9.2", + "resolved": "https://registry.npmjs.org/canvas-confetti/-/canvas-confetti-1.9.2.tgz", + "integrity": "sha512-6Xi7aHHzKwxZsem4mCKoqP6YwUG3HamaHHAlz1hTNQPCqXhARFpSXnkC9TWlahHY5CG6hSL5XexNjxK8irVErg==" + }, "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", diff --git a/website/package.json b/website/package.json index b0105102359..1c361319eb1 100644 --- a/website/package.json +++ b/website/package.json @@ -19,6 +19,7 @@ "@stoplight/elements": "^7.7.17", "@svgr/webpack": "^6.0.0", "axios": "^0.27.2", + "canvas-confetti": "^1.9.2", "classnames": "^2.3.1", "clsx": "^1.1.1", "color": "^3.1.2", diff --git a/website/src/components/confetti/index.js b/website/src/components/confetti/index.js new file mode 100644 index 00000000000..85aeaef46fb --- /dev/null +++ b/website/src/components/confetti/index.js @@ 
-0,0 +1,39 @@ +import React from 'react'; +import confetti from 'canvas-confetti'; + +const ConfettiTrigger = ({ children }) => { + const triggerConfetti = (event) => { + if (event?.target?.closest('a, h2, h3')) { + // Do nothing if the click is on these elements + return; + } + // config for the confetti w spread operator + const confettiCount = 200; + const confettiSettings = { + spread: 70, + startVelocity: 30, + scalar: 0.7, + gravity: 1.5, + decay: 0.9, + } + + // Launch confetti from multiple points + for (let i = 0; i < 5; i++) { + confetti({ + angle: 60, + particleCount: confettiCount / 5, + origin: { y: 0.6, x: (i + 1) * 0.2 - 0.1 }, + zIndex: 9999, + ...confettiSettings, + }); + } + }; + // add an OnClick event to trigger the confetti instead of listing EventListeners with useEffect + return ( + <div onClick={triggerConfetti} style={{ width: '100%' }}> + {children} {/* Render children passed to the component */} + </div> + ); +}; + +export default ConfettiTrigger; diff --git a/website/src/components/confetti/styles.modules.css b/website/src/components/confetti/styles.modules.css new file mode 100644 index 00000000000..e69de29bb2d diff --git a/website/src/css/custom.css b/website/src/css/custom.css index 2f938605c51..38389a71b53 100644 --- a/website/src/css/custom.css +++ b/website/src/css/custom.css @@ -2090,6 +2090,7 @@ h2.anchor.clicked a.hash-link:before { display: none; } + @media (max-width: 996px) { .quickstart-container { flex-direction: column; diff --git a/website/src/theme/MDXComponents/index.js b/website/src/theme/MDXComponents/index.js index 345145c780f..4104632d28c 100644 --- a/website/src/theme/MDXComponents/index.js +++ b/website/src/theme/MDXComponents/index.js @@ -45,6 +45,7 @@ import Icon from '@site/src/components/icon'; import Lifecycle from '@site/src/components/lifeCycle'; import detailsToggle from '@site/src/components/detailsToggle'; import expandable from '@site/src/components/expandable'; +import ConfettiTrigger from 
'@site/src/components/confetti/'; const MDXComponents = { head: MDXHead, @@ -96,5 +97,6 @@ const MDXComponents = { Lifecycle: Lifecycle, detailsToggle: detailsToggle, expandable: expandable, + ConfettiTrigger: ConfettiTrigger, }; export default MDXComponents; diff --git a/website/vercel.json b/website/vercel.json index 377cc635561..fc275cff56c 100644 --- a/website/vercel.json +++ b/website/vercel.json @@ -574,7 +574,12 @@ }, { "source": "/guides/orchestration/webhooks/zapier-new-cloud-job", - "destination": "/guides/zapier-new-cloud-job", + "destination": "/docs/deploy/deploy-jobs#trigger-on-job-completion--", + "permanent": true + }, + { + "source": "/guides/zapier-new-cloud-job", + "destination": "/docs/deploy/deploy-jobs#trigger-on-job-completion--", "permanent": true }, {