diff --git a/docs/content/api/modules.json.gz b/docs/content/api/modules.json.gz
index 3930da2c16840..ff01ad79f9f48 100644
Binary files a/docs/content/api/modules.json.gz and b/docs/content/api/modules.json.gz differ
diff --git a/docs/content/api/searchindex.json.gz b/docs/content/api/searchindex.json.gz
index 1faf44f410954..0f631e5efb09d 100644
Binary files a/docs/content/api/searchindex.json.gz and b/docs/content/api/searchindex.json.gz differ
diff --git a/docs/content/api/sections.json.gz b/docs/content/api/sections.json.gz
index aa1367b7d9f80..4c08dcab91d61 100644
Binary files a/docs/content/api/sections.json.gz and b/docs/content/api/sections.json.gz differ
diff --git a/docs/content/dagster-plus/insights/integrating-bigquery-and-dbt.mdx b/docs/content/dagster-plus/insights/integrating-bigquery-and-dbt.mdx
index 3aca79d7e7c66..ba03d220a2567 100644
--- a/docs/content/dagster-plus/insights/integrating-bigquery-and-dbt.mdx
+++ b/docs/content/dagster-plus/insights/integrating-bigquery-and-dbt.mdx
@@ -35,18 +35,13 @@ To complete the steps in this guide, you'll need:

-First, instrument the Dagster function with `dbt_with_bigquery_insights`:
+First, append `.with_insights()` to the dbt CLI call in your Dagster function:

 ```python
-from dagster_cloud.dagster_insights import dbt_with_bigquery_insights
-
-
 @dbt_assets(...)
 def my_asset(context: AssetExecutionContext, dbt: DbtCliResource):
-    # Typically you have a `yield from dbt_resource.cli(...)`.
-    # Wrap the original call with `dbt_with_bigquery_insights` as below.
-    dbt_cli_invocation = dbt_resource.cli(["build"], context=context)
-    yield from dbt_with_bigquery_insights(context, dbt_cli_invocation)
+    # Chain `with_insights` after any other metadata fetch, e.g. `fetch_row_count`
+    yield from dbt_resource.cli(["build"], context=context).stream().with_insights()
 ```

 This passes through all underlying events and emits additional <PyObject object="AssetObservation" /> events with BigQuery cost metrics. These metrics are obtained by querying the underlying `INFORMATION_SCHEMA.JOBS` table, using the BigQuery client from the dbt adapter.
@@ -54,20 +49,15 @@ This passes through all underlying events and emits additional

-First, instrument the op function with `dbt_with_bigquery_insights`:
+First, append `.with_insights()` to the dbt CLI call in your Dagster op function:

 ```python
-from dagster_cloud.dagster_insights import dbt_with_bigquery_insights
-
-
 @op(out={})
 def my_dbt_op(context: OpExecutionContext, dbt: DbtCliResource):
-    # Typically you have a `yield from dbt_resource.cli(...)`.
-    # Wrap the original call with `dbt_with_bigquery_insights` as below.
-    dbt_cli_invocation = dbt.cli(
+    # Chain `with_insights` after any other metadata fetch, e.g. `fetch_row_count`
+    yield from dbt.cli(
         ["build"], context=context, manifest=dbt_manifest_path
-    )
-    yield from dbt_with_bigquery_insights(context, dbt_cli_invocation)
+    ).stream().with_insights()

 @job
 def my_dbt_job():
@@ -110,7 +100,7 @@
 width={3454}
 height={1338}
 />
-The BigQuery cost metric is based off of the bytes billed for queries wrapped with `dbt_with_bigquery_insights`, based on a unit price of $6.25 USD per TiB.
+The BigQuery cost metric is based on the bytes billed for queries wrapped with `with_insights`, at a unit price of $6.25 USD per TiB.
 ---
diff --git a/docs/content/dagster-plus/insights/integrating-snowflake-and-dbt.mdx b/docs/content/dagster-plus/insights/integrating-snowflake-and-dbt.mdx
index 113c27957e7e9..b2d964f77805d 100644
--- a/docs/content/dagster-plus/insights/integrating-snowflake-and-dbt.mdx
+++ b/docs/content/dagster-plus/insights/integrating-snowflake-and-dbt.mdx
@@ -35,18 +35,13 @@ To complete the steps in this guide, you'll need:

-First, instrument the Dagster function with `dbt_with_snowflake_insights`:
+First, append `.with_insights()` to the dbt CLI call in your Dagster function:

 ```python
-from dagster_cloud.dagster_insights import dbt_with_snowflake_insights
-
-
 @dbt_assets(...)
 def my_asset(context: AssetExecutionContext, dbt: DbtCliResource):
-    # Typically you have a `yield from dbt_resource.cli(...)`.
-    # Wrap the original call with `dbt_with_snowflake_insights` as below.
-    dbt_cli_invocation = dbt_resource.cli(["build"], context=context)
-    yield from dbt_with_snowflake_insights(context, dbt_cli_invocation)
+    # Chain `with_insights` after any other metadata fetch, e.g. `fetch_row_count`
+    yield from dbt_resource.cli(["build"], context=context).stream().with_insights()
 ```

 This passes through all underlying events and emits an <PyObject object="AssetObservation" /> for each asset materialization. The observation contains the dbt invocation ID and unique ID recorded in the Dagster event log.
@@ -54,20 +49,15 @@ This passes through all underlying events and emits an

-First, instrument the op function with `dbt_with_snowflake_insights`:
+First, append `.with_insights()` to the dbt CLI call in your Dagster op function:

 ```python
-from dagster_cloud.dagster_insights import dbt_with_snowflake_insights
-
-
 @op(out={})
 def my_dbt_op(context: OpExecutionContext, dbt: DbtCliResource):
-    # Typically you have a `yield from dbt_resource.cli(...)`.
-    # Wrap the original call with `dbt_with_snowflake_insights` as below.
-    dbt_cli_invocation = dbt.cli(
+    # Chain `with_insights` after any other metadata fetch, e.g. `fetch_row_count`
+    yield from dbt.cli(
         ["build"], context=context, manifest=dbt_manifest_path
-    )
-    yield from dbt_with_snowflake_insights(context, dbt_cli_invocation)
+    ).stream().with_insights()

 @job
 def my_dbt_job():
diff --git a/docs/next/public/objects.inv b/docs/next/public/objects.inv
index 048165c370bad..00f04269cfa08 100644
Binary files a/docs/next/public/objects.inv and b/docs/next/public/objects.inv differ
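For reference, a minimal self-contained sketch of the new pattern these docs describe, assuming `dagster`, `dagster-dbt`, and `dagster-cloud` are installed (`.with_insights()` requires the `dagster-cloud` package); the manifest path and asset names here are hypothetical placeholders:

```python
from pathlib import Path

from dagster import AssetExecutionContext
from dagster_dbt import DbtCliResource, dbt_assets

# Hypothetical location of the dbt project's compiled manifest;
# point this at your own project's target/manifest.json.
DBT_MANIFEST_PATH = Path("target/manifest.json")


@dbt_assets(manifest=DBT_MANIFEST_PATH)
def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource):
    # `stream()` converts the dbt invocation into a stream of Dagster
    # events; `with_insights()` additionally emits the observations that
    # Dagster+ Insights uses to attribute warehouse cost per asset.
    yield from dbt.cli(["build"], context=context).stream().with_insights()
```

Chaining on the event stream, rather than wrapping the whole invocation as the old `dbt_with_*_insights` helpers did, lets the insights hook compose with other stream helpers such as `fetch_row_count`, which is why the inline comments recommend placing `with_insights` last in the chain.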