diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 78e23ec9287..fad32a7e224 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,7 +1,7 @@ # Lower entries on this list take precedence # Anything unmatched by selectors below -* @annafil @runleonarun +* @runleonarun # Cloud docs and images are assigned to Leona /website/docs/docs/dbt-cloud/dbt-cloud-changelog.md @runleonarun @@ -10,15 +10,18 @@ /website/static/img/docs/dbt-cloud/ @runleonarun # Blog content assigned to Developer experience team -/website/blog/ @dbt-labs/devex +/website/blog/ @runleonarun + # Adapter & Package Development Docs -/website/docs/docs/available-adapters.md/ @runleonarun @amychen1776 @dataders -/website/docs/reference/warehouse-profiles/ @runleonarun @amychen1776 @dataders -/website/docs/guides/building-packages @runleonarun @amychen1776 @dataders @dbeatty10 +/website/docs/docs/supported-data-platforms.md/ @runleonarun @dataders +/website/docs/reference/warehouse-profiles/ @runleonarun @dataders +/website/docs/reference/resource-configs/ @runleonarun @dataders +/website/docs/guides/building-packages @runleonarun @amychen1776 @dataders @dbeatty10 /website/docs/contributing/building-a-new-adapter @runleonarun @dataders @dbeatty10 /website/docs/contributing/testing-a-new-adapter @runleonarun @dataders @dbeatty10 /website/docs/guides/creating-new-materializations @runleonarun @dataders @dbeatty10 /website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/ @runleonarun @dataders @dbeatty10 + diff --git a/.github/ISSUE_TEMPLATE/contribute-to-developer-blog.yml b/.github/ISSUE_TEMPLATE/contribute-to-developer-blog.yml new file mode 100644 index 00000000000..f138b9e4e06 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/contribute-to-developer-blog.yml @@ -0,0 +1,54 @@ +name: Contribute to the dbt Developer Blog +description: > + For proposing a new post on the dbt Developer Blog. +labels: ["content","developer blog"] +body: + - type: markdown + attributes: + value: | + We're excited to hear about your idea for the dbt Developer Blog. This template will help lay out the proposed post and then we will work with you on next steps! + + - type: input + id: contact + attributes: + label: Contact Details + description: How can we get in touch with you? + placeholder: your preferred email and/or dbt Slack handle + validations: + required: true + + - type: checkboxes + id: read-contribution + attributes: + label: I have read the dbt Developer Blog contribution guidelines. + description: You can find the contribution guide [here](https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/developer-blog.md) + options: + - label: I have read the dbt Developer Blog contribution guidelines. + validations: + required: true + + - type: checkboxes + id: author_type + attributes: + label: Which of these best describes you? + options: + - label: I am a dbt Community member or partner contributing to the Developer Blog + - label: I work for dbt Labs and am creating this issue for a community- or marketing-approved piece. + validations: + + + - type: textarea + attributes: + label: > + What is the topic of your post? + description: | + Please provide a short (~ 1 paragraph) summary as well as who this post is targeted towards (i.e. people interested in learning more about dbt snapshots, or advanced Jinja users) + validations: + required: true + + - type: textarea + attributes: + label: Link to an initial outline.
+ description: Please link to a short outline in Notion, or Google Docs + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/direct-from-marketing.yml b/.github/ISSUE_TEMPLATE/direct-from-marketing.yml deleted file mode 100644 index 4f9417894c1..00000000000 --- a/.github/ISSUE_TEMPLATE/direct-from-marketing.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: Add marketing blog content -description: > - For adding marketing-approved content to the DevBlog. -labels: ["content","developer blog"] -body: - - type: markdown - attributes: - value: | - Let's get some content up! - - This issue template is for dbt Labs employees who are adding marketing-approved content to the docs repo. - - - type: input - id: contact - attributes: - label: Contact Details - description: How can we get in touch with you if we need more info? - placeholder: your preferred email and/or dbt Slack handle - validations: - required: true - - - type: checkboxes - id: from-marketing - attributes: - label: This issue is being made to create a marketing-approved piece. - description: > - This content will typically fall under glossary pages, community recap events, and devblog content for campaigns. Please reach out to @johnblust if there's any confusion here. - options: - - label: I work for dbt Labs and am creating this issue for a marketing-approved piece. - validations: - required: true - - - type: textarea - attributes: - label: Link to the initial outlines we will develop through this Issue. - description: Please link to intial outline in FigJam, Notion, or Google Docs you are using to guide your work. - validations: - required: true \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/promote-discussion-to-blog.yml b/.github/ISSUE_TEMPLATE/promote-discussion-to-blog.yml deleted file mode 100644 index 8c78d65a397..00000000000 --- a/.github/ISSUE_TEMPLATE/promote-discussion-to-blog.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Promote Discussion to Blog Content -description: > - For promoting a Discussion that's been approved for outlining to an Issue so it can be crafted into - new content on the Developer Blog section of the Developer Hub. -labels: ["content","developer blog"] -body: - - type: markdown - attributes: - value: | - Let's write an article together! - - The foundational requirement of this Issue will be an initial outline. This should arrange the content you've laid out in the original Discussion into a structural flow that looks something like this [initial outline example forthcoming]. - - type: input - id: contact - attributes: - label: Contact Details - description: How can we get in touch with you if we need more info? - placeholder: your preferred email and/or dbt Slack handle - validations: - required: true - - - - type: checkboxes - id: from-discussion - attributes: - label: This issue is being created to promote an approved Discussion. - description: > - Creating new content for the Developer Blog should start as a Discussion. Please confirm that you've followed the Discussion path and - the Discussion being promoted has been approved with the label 'promote to developer blog issue'. - options: - - label: I am promoting this issue from an approved Discussion and will link it below. - validations: - required: true - - - type: textarea - attributes: - label: Link to the Discussion this Issue is promoting. - description: Please link to the Discussion this Issue is originating from. 
- validations: - required: true - - - type: textarea - attributes: - label: > - If you're promoting this from an 'I want to write about...' Discussion, you can copy over the answers to the following questions. Otherwise, - please answer these questions below - description: | - 1. What is the main problem you are solving? What is your solution? _This should help form your core thesis._ - 2. Why should the reader care about this problem? Why is your solution the right one? _This should help form your specific target audience._ - 3. Can you list the steps of your solution for the reader here? _This should help you form the overall narrative arc and sketch out an example use case to illustrate it._ - - Optional - - 4. Are there any resources that helped inspire or inform your idea? (eg slack discussions, articles, external product docs, etc. -- if so please link) - 5. Are there other existing solutions that solve the problem, and if so, how is this solution better or different? If so please share any links here. - validations: - required: true - - - type: markdown - attributes: - value: | - In addition to the initial questions from the Discussion, which should form the body and arc of your article, the following questions will help you round out your initial outline with additional context. - - 1. Is there anything the reader needs to know before they start? - 2. Are there any tool & skill prerequisites the reader should be aware of? - 3. Are there any drawbacks or tradeoffs to your solution that the reader should know about? Given tradeoffs, can you explain in greater detail the circumstances in which your solution is best deployed? - 4. Are there any additional resources you can point the reader to to give them greater context or preparation on the problem and/or solution? - 5. Can you describe what a (or a few) successful outcomes of your solution could look like? - - Use the answers to these questions to strengthen an example use case that will support the arc of your narrative. Lay this out in the introduction of your outline, use it to illustrate your headings throughout, and circle back to it in a conclusion. - - Include these introduction and conclusion sections to wrap the steps from your Discussion in your initial outline. - - - type: textarea - attributes: - label: Link to the initial outline we will develop through this Issue. 
- description: Please link to the inital outline in FigJam, Notion, or Google Docs - validations: - required: true diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 3f9070c8237..5ae7e9f4d6d 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -2,6 +2,8 @@ ## To-do before merge @@ -14,8 +16,8 @@ Include any notes about things that need to happen before this PR is merged, e.g ## Prerelease docs If this change is related to functionality in a prerelease version of dbt (delete if not applicable): -- [ ] I've added versioning components, as described in ["Versioning Docs"](../contributing/versioningdocs.md) -- [ ] I've added a note to the prerelease version's [Migration Guide](../website/docs/docs/guides/migration-guide) +- [ ] I've added versioning components, as described in ["Versioning Docs"](https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/versioningdocs.md) +- [ ] I've added a note to the prerelease version's [Migration Guide](https://github.com/dbt-labs/docs.getdbt.com/tree/current/website/docs/guides/migration/versions) ## Checklist If you added new pages (delete if not applicable): @@ -25,3 +27,4 @@ If you added new pages (delete if not applicable): If you removed existing pages (delete if not applicable): - [ ] The page has been removed from `website/sidebars.js` - [ ] An entry has been added to `_redirects` +- [ ] [Run link testing](https://github.com/dbt-labs/docs.getdbt.com#running-the-cypress-tests-locally) to update the links that point to the deleted page diff --git a/.github/workflows/create_next_pr.yml b/.github/workflows/create_next_pr.yml new file mode 100644 index 00000000000..becef0db654 --- /dev/null +++ b/.github/workflows/create_next_pr.yml @@ -0,0 +1,18 @@ +on: + push: + branches: + - "current" + +jobs: + pull-request: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: pull-request + uses: repo-sync/pull-request@v2 + with: + source_branch: "current" + destination_branch: "next" + pr_title: "Merge current branch into next" + pr_body: "*An automated PR to keep the next branch up to date with current*" + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/cypress_tests.yml b/.github/workflows/cypress_tests.yml new file mode 100644 index 00000000000..1973e0ef43e --- /dev/null +++ b/.github/workflows/cypress_tests.yml @@ -0,0 +1,47 @@ +name: cypress tests + +on: + pull_request: + branches: + - current + +jobs: + start-workflow: + name: start workflow + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: install node + uses: actions/setup-node@v2 + with: + node-version: '16.13.1' + + - name: npm install + run: cd website && npm ci + + - name: setup env + run: | + PR_NUMBER=$(jq --raw-output .pull_request.number ${{ github.event_path }}) + echo "PR_NUMBER=${PR_NUMBER}" >> ${GITHUB_ENV} + + - name: run cypress e2e + uses: cypress-io/github-action@v4 + with: + command: npx cypress run + browser: chrome + working-directory: website + spec: cypress/e2e/*.cy.js + wait-on: https://deploy-preview-${{ env.PR_NUMBER }}--docs-getdbt-com.netlify.app + wait-on-timeout: 600 + env: + CYPRESS_BASE_URL: https://deploy-preview-${{ env.PR_NUMBER }}--docs-getdbt-com.netlify.app + + - name: Screenshot artifact upload + uses: actions/upload-artifact@v2 + if: failure() + with: + name: cypress-screenshots + path: cypress/screenshots \ No newline at end of file diff --git a/.github/workflows/labelsizer.yml 
b/.github/workflows/labelsizer.yml index 2fbca704911..d1ad5776597 100644 --- a/.github/workflows/labelsizer.yml +++ b/.github/workflows/labelsizer.yml @@ -1,14 +1,24 @@ -name: Add size label to PR -on: - pull_request_target +name: labeler + +on: [pull_request_target] jobs: - build: - name: Add Size Label to PR + labeler: runs-on: ubuntu-latest + name: Label the PR size steps: - - name: Add Size Label to PR - uses: WizardOhio24/label-pr-size-action@v1.0.1 - with: + - uses: codelytv/pr-size-labeler@v1.8.1 + with: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - size-label-color: '10, size: x-small | 50, size: small | 250, size: medium | 500, size: large | 100000, size: x-large' + xs_label: 'size: x-small' + xs_max_size: '10' + s_label: 'size: small' + s_max_size: '50' + m_label: 'size: medium' + m_max_size: '250' + l_label: 'size: large' + l_max_size: '100000' + xl_label: 'size: x-large' + fail_if_xl: 'false' + files_to_ignore: '' + diff --git a/.gitignore b/.gitignore index 5395ea795d6..172fef2f8e3 100755 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,10 @@ website/build/ website/yarn.lock website/node_modules website/i18n/* + +# Local VS Code +.vscode +# Local Netlify folder +.netlify + +.vscode diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000000..1e7c2ea79e0 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,9 @@ +{ + "spellright.language": [ + "en" + ], + "spellright.documentTypes": [ + "latex", + "plaintext" + ] +} \ No newline at end of file diff --git a/README.md b/README.md index dcf72f89e55..83aa4f7e5bc 100644 --- a/README.md +++ b/README.md @@ -1,52 +1,79 @@ _We use [docusaurus](https://v2.docusaurus.io/) to power [docs.getdbt.com](https://docs.getdbt.com/)._ #### Table of Contents + +* [Code of Conduct](#code-of-conduct) * [Contributing](#contributing) * [Writing content](#writing-content) * [Running the docs site locally](#running-the-docs-site-locally) +# Code of conduct + +Please review the dbt docs contributors [code of conduct](https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/contributor-code-of-conduct.md). +Creating an inclusive and equitable environment for our docs community is more important than any other aspect. Syntax errors can be corrected, but trust, once lost, is difficult to regain. + # Contributing We welcome contributions from community members to this repo: -- **Fixes**: When you notice an error, you can use the `Edit this page` button at the bottom of each page to suggest a change. +- **Fixes**: When you notice an error, you can use the `Edit this page` button at the bottom of each page to suggest a change. - **New documentation**: If you contributed code in [dbt-core](https://github.com/dbt-labs/dbt-core), we encourage you to also write the docs here! Please reach out in the dbt community if you need help finding a place for these docs. - **Major rewrites**: You can [file an issue](https://github.com/dbt-labs/docs.getdbt.com/issues/new?assignees=&labels=content%2Cimprovement&template=improve-docs.yml) or [start a discussion](https://github.com/dbt-labs/docs.getdbt.com/discussions) to propose ideas for a content area that requires attention. -You can use components documented in the [docusaurus library](https://v2.docusaurus.io/docs/markdown-features/). +You can use components documented in the [docusaurus library](https://v2.docusaurus.io/docs/markdown-features/).
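For example, a tabbed view on a docs page can be written with the stock Docusaurus `Tabs` components. This is a minimal sketch only; the `value` and `label` strings below are illustrative and not taken from an actual page:

```jsx
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

<Tabs>
  <TabItem value="cloud" label="dbt Cloud" default>
    Content shown on the dbt Cloud tab.
  </TabItem>
  <TabItem value="core" label="dbt Core">
    Content shown on the dbt Core tab.
  </TabItem>
</Tabs>
```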
# Writing content -When writing content, you should refer to the style guide (TBD) and [content types](/contributing/content-types.md) to help you understand our writing standards and how we break down information in the product documentaion. +The dbt Labs docs are written in Markdown and sometimes HTML. When writing content, refer to the [style guide](https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/content-style-guide.md) and [content types](/contributing/content-types.md) to help you understand our writing standards and how we break down information in the product documentation. + +## SME and editorial reviews -## Using the content style guide +All PRs that are submitted receive an editorial review from the dbt Labs Docs team. -[TBD] a style guide to help you write in a consistent, accessible, and inclusive style. +Content submitted by our users and the open-source community is also reviewed by our dbt Labs subject matter experts (SMEs) to help ensure technical accuracy. -## Versioning content -We now provide dynamic versioning in the dbt Labs product documentation. To learn more about how to version the docs for dbt Core, see [Managing available versions](/contributing/versioningdocs.md). +## Versioning and single-sourcing content -## Adding content to the glossary +We now enable you to reuse content between different docs pages, version pages, and establish product variables in the dbt Labs product documentation. To learn more about how to single-source content across versions, product variables, and other content, see [Single-sourcing content](/contributing/single-sourcing-content.md). -[TBD] a how-to guide on the programming behind glossary entries. +## Adding tabbed components to a page + +You can add code snippets and other content in a tabbed view. To learn more about adding tabbed components, see [Adding page components](/contributing/adding-page-components.md). # Running the Docs site locally You can click a link available in a Netlify bot PR comment to see and review your changes rendered on a staging server. You are also able to see and review your proposed modifications locally on your computer. Our setup instructions use [homebrew](https://brew.sh/): -## prerequisites -* Install [Xcode Command Line Tools](https://developer.apple.com/download/more/); you'll likely need an AppleID for this. -* Install [homebrew](https://brew.sh/). +## Prerequisites + +* (Mac Terminal) Install [Xcode Command Line Tools](https://developer.apple.com/download/more/) - Open a terminal window, run `xcode-select --install`, and follow the on-screen prompts in the pop-up window. +* (Mac and Linux) Install [homebrew](https://brew.sh/) - Copy and paste `/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"` into a terminal window and follow the prompts. Once the installation has completed, follow the **Next Steps** instructions listed in the terminal. +* (Windows) Install [Node.js](https://nodejs.org/en/download/) -1. Install `node`: `brew install node` -2. Clone this repo: `git clone git@github.com:fishtown-analytics/docs.getdbt.com.git` +1. (Mac and Linux only) Install `node`: `brew install node` +2. Clone this repo: `git clone https://github.com/dbt-labs/docs.getdbt.com.git` 3. `cd` into the repo: `cd docs.getdbt.com` 4. `cd` into the `website` subdirectory: `cd website` -5. Install the required node packages: `npm install` +5. Install the required node packages: `npm install` (optional: install any updates) 6.
Build the website: `npm start` 7. Before pushing your changes to a branch, check that all links work by using the `make build` script. -Advisory: -- Currently an `npm install` produces a number of dependency warnings, in particular several claiming that `docusaurus/core` is missing. Rest assured, this message is a red herring. As of writing this, no 2.0.0 package exists, so you won't have much luck trying to install it. Feel free to ignore those warnings. +Advisory: - If you run into a `fatal error: 'vips/vips8' file not found` error when you run `npm install`, you may need to run `brew install vips`. Warning: this one will take a while -- go ahead and grab some coffee! +## Running the Cypress tests locally + +Method 1: Using the Cypress GUI +1. `cd` into the repo: `cd docs.getdbt.com` +2. `cd` into the `website` subdirectory: `cd website` +3. Install the required node packages: `npm install` +4. Run `npx cypress open` to open the Cypress GUI, and choose `E2E Testing` as the Testing Type, before finally selecting your browser and clicking `Start E2E testing in {browser}` +5. Click on a test and watch it run! + +Method 2: Running the Cypress E2E tests headlessly +1. `cd` into the repo: `cd docs.getdbt.com` +2. `cd` into the `website` subdirectory: `cd website` +3. Install the required node packages: `npm install` +4. Run `npx cypress run` diff --git a/_redirects b/_redirects index 097e9cc3946..aab9b67f89c 100644 --- a/_redirects +++ b/_redirects @@ -1,303 +1,527 @@ -# redirect next -https://next.docs.getdbt.com/* https://docs.getdbt.com/:splat 301! +/docs/running-a-dbt-project/running-dbt-in-production /docs/deploy/deployments 301 +/docs/running-a-dbt-project/profile /docs/get-started/connection-profiles 301 +/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications /docs/deploy/job-notifications 301 +/docs/dbt-cloud/using-dbt-cloud /docs/develop/develop-in-the-cloud 301 +/docs/dbt-cloud/january-2020-pricing-updates https://www.getdbt.com/pricing/ 301 +/docs/dbt-cloud/dbt-cloud-enterprise https://www.getdbt.com/pricing/ 301 +/docs/dbt-cloud/cloud-quickstart /docs/get-started/getting-started/set-up-dbt-cloud 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud /docs/develop/getting-started/getting-set-up 301 +/docs/building-a-dbt-project/archival /docs/build/snapshots 301 +/docs/about/license /community/resources/contributor-license-agreements 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository /docs/collaborate/git/managed-repository 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database /docs/get-started/connect-your-database 301 +/docs/dbt-cloud/release-notes /docs/dbt-versions/dbt-cloud-release-notes 301 +/docs/dbt-cloud/dbt-cloud-enterprise/audit-log /docs/collaborate/manage-access/audit-log 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-setting-up-bigquery-oauth /docs/collaborate/manage-access/set-up-bigquery-oauth 301 +/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-snowflake-oauth /docs/collaborate/manage-access/set-up-snowflake-oauth 301 +/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-okta /docs/collaborate/manage-access/set-up-sso-okta 301 +/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory /docs/collaborate/manage-access/set-up-sso-azure-active-directory 301 +/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite /docs/collaborate/manage-access/set-up-sso-google-workspace 301 +/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-saml-2.0
/docs/collaborate/manage-access/set-up-sso-saml-2.0 301 +/docs/dbt-cloud/dbt-cloud-enterprise/sso-overview /docs/collaborate/manage-access/sso-overview 301 +/docs/dbt-cloud/access-control/enterprise-permissions /docs/collaborate/manage-access/enterprise-permissions 301 +/docs/dbt-cloud/access-control/self-service-permissions /docs/collaborate/manage-access/self-service-permissions 301 +/docs/dbt-cloud/access-control/cloud-seats-and-users /docs/collaborate/manage-access/seats-and-users 301 +/docs/dbt-cloud/access-control/access-control-overview /docs/collaborate/manage-access/about-access 301 +/docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation /docs/collaborate/build-and-view-your-docs 301 +/docs/building-a-dbt-project/documentation /docs/collaborate/documentation 301 +/docs/building-a-dbt-project/managing-environments /docs/collaborate/environments 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url /docs/collaborate/git/import-a-project-by-git-url 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure /docs/collaborate/git/authenticate-azure 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/setup-azure /docs/collaborate/git/setup-azure 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-azure-devops /docs/collaborate/git/connect-azure-devops 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-gitlab /docs/collaborate/git/connect-gitlab 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application /docs/collaborate/git/connect-github 301 +/docs/dbt-cloud/cloud-ide/handling-merge-conflicts /docs/collaborate/git/resolve-merge-conflicts 301 +/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide /docs/collaborate/cloud-build-and-view-your-docs 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories /docs/collaborate/git/pr-template 301 +/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration /docs/deploy/cloud-ci-job 301 +/docs/dbt-cloud/using-dbt-cloud/cloud-dashboard-status-tiles /docs/deploy/dashboard-status-tiles 301 +/docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness /docs/deploy/source-freshness 301 +/docs/dbt-cloud/using-dbt-cloud/cloud-notifications /docs/deploy/job-notifications 301 +/docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule /docs/deploy/job-triggers 301 +/docs/dbt-cloud/deployments/airgapped-deployment /docs/deploy/airgapped-deployment 301 +/docs/dbt-cloud/deployments/single-tenant-deployment /docs/deploy/single-tenant 301 +/docs/dbt-cloud/deployments/multi-tenant-deployment /docs/deploy/multi-tenant 301 +/docs/dbt-cloud/deployments/deployment-architecture /docs/deploy/architecture 301 +/docs/dbt-cloud/deployments/deployment-overview /docs/deploy/deployments 301 +/docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name /docs/build/custom-target-names 301 +/docs/building-a-dbt-project/building-models/using-custom-aliases /docs/build/custom-aliases 301 +/docs/building-a-dbt-project/building-models/using-custom-databases /docs/build/custom-databases 301 +/docs/building-a-dbt-project/building-models/using-custom-schemas /docs/build/custom-schemas 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-exposures /docs/dbt-cloud-apis/metadata-schema-exposures 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-exposure /docs/dbt-cloud-apis/metadata-schema-exposure 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-tests /docs/dbt-cloud-apis/metadata-schema-tests 301 
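# Note on format (annotation for readers, not part of the changeset above): each rule in this file
# follows Netlify's `_redirects` syntax of `source destination status`, separated by whitespace,
# with 301 marking a permanent move and 302 a temporary one. A hypothetical rule such as
#   /docs/old-page /docs/new-page 301
# would permanently redirect the old path to the new one.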
+/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-test /docs/dbt-cloud-apis/metadata-schema-test 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-snapshots /docs/dbt-cloud-apis/metadata-schema-snapshots 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-seeds /docs/dbt-cloud-apis/metadata-schema-seeds 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-seed /docs/dbt-cloud-apis/metadata-schema-seed 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-sources /docs/dbt-cloud-apis/metadata-schema-sources 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-source /docs/dbt-cloud-apis/metadata-schema-source 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metrics /docs/dbt-cloud-apis/metadata-schema-metrics 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metric /docs/dbt-cloud-apis/metadata-schema-metric 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-modelByEnv /docs/dbt-cloud-apis/metadata-schema-modelByEnv 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-models /docs/dbt-cloud-apis/metadata-schema-models 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-model /docs/dbt-cloud-apis/metadata-schema-model 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-querying /docs/dbt-cloud-apis/metadata-querying 301 +/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview /docs/dbt-cloud-apis/metadata-api 301 +/docs/dbt-cloud/dbt-cloud-api/admin-cloud-api /docs/dbt-cloud-apis/admin-cloud-api 301 +/docs/dbt-cloud/dbt-cloud-api/service-tokens /docs/dbt-cloud-apis/service-tokens 301 +/docs/dbt-cloud/dbt-cloud-api/user-tokens /docs/dbt-cloud-apis/user-tokens 301 +/docs/dbt-cloud/dbt-cloud-api/cloud-apis /docs/dbt-cloud-apis/overview 301 +/docs/building-a-dbt-project/hooks-operations /docs/build/hooks-operations 301 +/docs/building-a-dbt-project/analyses /docs/build/analyses 301 +/docs/building-a-dbt-project/package-management /docs/build/packages 301 +/docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables /docs/build/environment-variables 301 +/docs/building-a-dbt-project/building-models/using-variables /docs/build/project-variables 301 +/docs/building-a-dbt-project/jinja-macros /docs/build/jinja-macros 301 +/docs/building-a-dbt-project/building-models/configuring-incremental-models /docs/build/incremental-models 301 +/docs/building-a-dbt-project/building-models/materializations /docs/build/materializations 301 +/docs/building-a-dbt-project/tests /docs/build/tests 301 +/docs/building-a-dbt-project/metrics /docs/build/metrics 301 +/docs/building-a-dbt-project/exposures /docs/build/exposures 301 +/docs/building-a-dbt-project/snapshots /docs/build/snapshots 301 +/docs/building-a-dbt-project/seeds /docs/build/seeds 301 +/docs/building-a-dbt-project/building-models /docs/build/sql-models 301 +/docs/building-a-dbt-project/using-sources /docs/build/sources 301 +/docs/building-a-dbt-project/projects /docs/build/projects 301 +/docs/building-a-dbt-project/building-models/python-models /docs/build/python-models 301 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions /docs/dbt-versions/upgrade-core-in-cloud 301 +/docs/core-versions /docs/dbt-versions/core 301 +/docs/dbt-cloud/cloud-dbt-cloud-support /docs/dbt-support 301 +/docs/about/viewpoint /community/resources/viewpoint 301 +/docs/viewpoint /community/resources/viewpoint 301 +/dbt-cli/configure-your-profile /docs/get-started/connection-profiles 301 
+/docs/running-a-dbt-project/using-the-cli /docs/get-started/about-the-cli 301 +/dbt-cli/install/from-source /docs/get-started/source-install 301 +/dbt-cli/install/docker /docs/get-started/docker-install 301 +/dbt-cli/install/pip /docs/get-started/pip-install 301 +/dbt-cli/install/homebrew /docs/get-started/homebrew-install 301 +/dbt-cli/install/overview /docs/get-started/installation 301 +/docs/dbt-cloud/cloud-ide/the-dbt-ide /docs/get-started/dbt-cloud-features 301 +/useful*components https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/adding-page-components.md 302 +/guides/legacy/managing-environments /docs/building-a-dbt-project/managing-environments 302 +/docs/running-a-dbt-project/dbt-api /docs/introduction 301 +/img/docs/dbt-cloud/dbt-cloud-enterprise/icon.png https://www.getdbt.com/ui/img/dbt-icon.png 301! +/dbt-cli/installation-guides/centos /docs/get-started/installation 301 +/dbt-cli/installation-guides/centos /docs/get-started/installation 301 +/dbt-cli/installation-guides/install-from-source /dbt-cli/install/from-source 302 +/dbt-cli/installation-guides/macos /docs/get-started/installation 302 +/dbt-cli/installation-guides/ubuntu-debian /docs/get-started/installation 302 +/dbt-cli/installation-guides/windows /docs/get-started/installation 302 +/dbt-cli/installation /docs/get-started/installation 302 +/dbt-jinja-functions /reference/dbt-jinja-functions 302 +/docs /docs/introduction 302 +/docs/adapter /docs/writing-code-in-dbt/jinja-context/adapter 302 +/docs/analyses /docs/building-a-dbt-project/analyses 302 +/docs/api-variable /docs/writing-code-in-dbt/api-variable 302 +/docs/archival /docs/building-a-dbt-project/archival 302 +/docs/artifacts /docs/dbt-cloud/using-dbt-cloud/artifacts 302 +/docs/bigquery-configs /reference/resource-configs/bigquery-configs 302 +/reference/resource-properties/docs /reference/resource-configs/docs 302 +/docs/building-a-dbt-project/building-models/bigquery-configs /reference/resource-configs/bigquery-configs 302 +/docs/building-a-dbt-project/building-models/configuring-models /reference/model-configs +/docs/building-a-dbt-project/building-models/enable-and-disable-models /reference/resource-configs/enabled 302 +/docs/building-a-dbt-project/building-models/redshift-configs /reference/resource-configs/redshift-configs 302 +/docs/building-a-dbt-project/building-models/snowflake-configs /reference/resource-configs/snowflake-configs 302 +/docs/building-a-dbt-project/building-models/spark-configs /reference/resource-configs/spark-configs 302 +/docs/building-a-dbt-project/building-models/tags /reference/resource-configs/tags 302 +/docs/building-a-dbt-project/building-models/using-sql-headers /reference/resource-configs/sql_header 302 +/docs/building-a-dbt-project/dbt-projects /docs/building-a-dbt-project/projects 302 +/docs/building-a-dbt-project/dbt-projects/configuring-query-comments /reference/project-configs/query-comment 302 +/docs/building-a-dbt-project/dbt-projects/configuring-quoting /reference/project-configs/quoting 302 +/docs/building-a-dbt-project/dbt-projects/creating-a-project /docs/building-a-dbt-project/projects#creating-a-dbt-project 302 +/docs/building-a-dbt-project/dbt-projects/requiring-specific-dbt-versions /reference/project-configs/require-dbt-version 302 +/docs/building-a-dbt-project/dbt-projects/use-an-existing-project /docs/building-a-dbt-project/projects#using-an-existing-project 302 +/docs/building-a-dbt-project/hooks /docs/building-a-dbt-project/hooks-operations 302 
+/docs/building-a-dbt-project/testing-and-documentation /docs/building-a-dbt-project/tests 302 +/docs/building-a-dbt-project/testing-and-documentation/documentation /docs/building-a-dbt-project/testing-and-documentation/documentation 302 +/docs/building-a-dbt-project/testing-and-documentation/documentation-website /docs/building-a-dbt-project/testing-and-documentation/documentation 302 +/docs/building-a-dbt-project/testing-and-documentation/schemayml-files /reference/declaring-properties 302 +/docs/building-a-dbt-project/testing-and-documentation/testing /docs/building-a-dbt-project/tests 302 +/docs/building-a-dbt-project/using-operations /docs/building-a-dbt-project/hooks-operations 302 +/docs/building-a-new-adapter /docs/contributing/building-a-new-adapter 302 +/docs/building-models /docs/building-a-dbt-project/building-models 302 +/docs/building-packages /guides/legacy/building-packages 302 +/docs/centos /dbt-cli/installation 302 +/docs/clean /reference/commands/clean 302 +/docs/cloud-choosing-a-dbt-version /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version 302 +/docs/cloud-configuring-dbt-cloud /docs/dbt-cloud/cloud-configuring-dbt-cloud 302 +/docs/cloud-enabling-continuous-integration-with-github /docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github 302 +/docs/cloud-generating-documentation /docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation 302 +/docs/cloud-import-a-project-by-git-url /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url 302 +/docs/cloud-installing-the-github-application /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application 302 +/docs/cloud-managing-permissions /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-managing-permissions 302 +/docs/cloud-overview /docs/dbt-cloud/cloud-overview 302 +/docs/cloud-quickstart /docs/dbt-cloud/cloud-quickstart 302 +/docs/cloud-seats-and-users /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-seats-and-users 302 +/docs/cloud-setting-a-custom-target-name /docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name 302 +/docs/cloud-snapshotting-source-freshness /docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness 302 +/docs/cloud-supported-dbt-versions /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version 302 +/docs/cloud-using-a-custom-cron-schedule /docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule 302 +/docs/cloud-using-a-managed-repository /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository 302 +/docs/cmd-docs /reference/commands/cmd-docs 302 +/docs/command-line-interface /reference/dbt-commands 302 +/docs/compile /reference/commands/compile 302 +/docs/config /docs/writing-code-in-dbt/jinja-context/config 302 +/docs/configure-your-profile /dbt-cli/configure-your-profile 302 +/docs/configuring-incremental-models /docs/building-a-dbt-project/building-models/configuring-incremental-models 302 +/docs/configuring-models /reference/model-configs 302 +/docs/configuring-query-comments /docs/building-a-dbt-project/dbt-projects/configuring-query-comments 302 +/docs/configuring-quoting /docs/building-a-dbt-project/dbt-projects/configuring-quoting 302 +/docs/configuring-resources-from-the-project-file /docs/building-a-dbt-project/dbt-projects/configuring-resources-from-the-project-file 302 +/docs/connecting-your-database /docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database 302 +/docs/contributor-license-agreements 
/docs/contributing/contributor-license-agreements 302 +/docs/creating-a-project /docs/building-a-dbt-project/dbt-projects/creating-a-project 302 +/docs/creating-new-materializations /guides/legacy/creating-new-materializations 302 +/docs/custom-schema-tests /guides/legacy/writing-custom-generic-tests 302 +/docs/dbt-api /docs/running-a-dbt-project/dbt-api 302 +/docs/dbt-cloud-enterprise /docs/dbt-cloud/dbt-cloud-enterprise 302 +/docs/dbt-cloud/cloud-configuring-repositories /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories 302 +/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version /docs/dbt-versions/upgrade-core-in-cloud 301 +/docs/dbt-cloud/dbt-cloud-enterprise/enterprise-permissions/ /docs/dbt-cloud/access-control/enterprise-permissions 302 +/docs/dbt-cloud/on-premises/architecture /dbt-cloud/on-premises/dependencies 302 +/docs/dbt-projects /docs/building-a-dbt-project/dbt-projects 302 +/docs/dbt_projectyml-file /docs/building-a-dbt-project/dbt-projects/dbt_projectyml-file 302 +/docs/debug /reference/commands/debug 302 +/docs/debug-method /docs/writing-code-in-dbt/jinja-context/debug-method 302 +/docs/deps /reference/commands/deps 302 +/docs/doc /docs/writing-code-in-dbt/jinja-context/doc 302 +/docs/documentation /docs/building-a-dbt-project/documentation 302 +/docs/documentation-website /docs/building-a-dbt-project/documentation 302 +/docs/dont-nest-your-curlies /docs/building-a-dbt-project/dont-nest-your-curlies 302 +/docs/enable-and-disable-models /reference/resource-configs/enabled 302 +/docs/enterprise-permissions /docs/dbt-cloud/dbt-cloud-enterprise/enterprise-permissions 302 +/docs/env_var /docs/writing-code-in-dbt/jinja-context/env_var 302 +/docs/exceptions /docs/writing-code-in-dbt/jinja-context/exceptions 302 +/docs/execute /docs/writing-code-in-dbt/jinja-context/execute 302 +/docs/exit-codes /reference/exit-codes 302 +/docs/flags /docs/writing-code-in-dbt/jinja-context/flags 302 +/docs/fromjson /docs/writing-code-in-dbt/jinja-context/fromjson 302 +/docs/getting-started-with-jinja /docs/building-a-dbt-project/jinja-macros 302 +/docs/global-cli-flags /reference/global-cli-flags 302 +/docs/graph /docs/writing-code-in-dbt/jinja-context/graph 302 +/docs/guides/building-packages /guides/legacy/building-packages 302 +/docs/guides/creating-new-materializations /guides/legacy/creating-new-materializations 302 +/docs/guides/debugging-errors /guides/legacy/debugging-errors 302 +/docs/guides/debugging-schema-names /guides/legacy/debugging-schema-names 302 +/docs/guides/getting-help /guides/legacy/getting-help 302 +/docs/guides/managing-environments /guides/legacy/managing-environments 302 +/docs/guides/navigating-the-docs /guides/legacy/navigating-the-docs 302 +/docs/guides/understanding-state /guides/legacy/understanding-state 302 +/docs/guides/videos /guides/legacy/videos 302 +/docs/guides/writing-custom-generic-tests /guides/legacy/writing-custom-generic-tests 302 +/docs/guides/writing-custom-schema-tests /guides/legacy/writing-custom-generic-tests 302 +/docs/guides/best-practices#choose-your-materializations-wisely /guides/legacy/best-practices#choose-your-materializations-wisely 302 +/docs/guides/best-practices#version-control-your-dbt-project /guides/legacy/best-practices#version-control-your-dbt-project 302 +/docs/best-practices /guides/legacy/best-practices 302 +/docs/guides/best-practices /guides/best-practices 302 +/docs/hooks /docs/building-a-dbt-project/hooks-operations 302 +/docs/init /reference/commands/init 302 
+/docs/install-from-source /dbt-cli/installation 302 +/docs/installation /dbt-cli/installation 302 +/docs/invocation_id /docs/writing-code-in-dbt/jinja-context/invocation_id 302 +/docs/jinja-context /docs/writing-code-in-dbt/jinja-context 302 +/docs/license /docs/about/license 302 +/docs/list /reference/commands/list 302 +/docs/log /docs/writing-code-in-dbt/jinja-context/log 302 +/docs/macos /dbt-cli/installation 302 +/docs/macros /docs/building-a-dbt-project/macros 302 +/docs/managing-environments /guides/legacy/managing-environments 302 +/docs/materializations /docs/building-a-dbt-project/building-models/materializations 302 +/docs/model-selection-syntax /reference/node-selection/syntax 302 +/docs/modules /docs/writing-code-in-dbt/jinja-context/modules 302 +/docs/on-run-end-context /docs/writing-code-in-dbt/jinja-context/on-run-end-context 302 +/docs/overview /docs/introduction 302 +/docs/package-management /docs/building-a-dbt-project/package-management 302 +/docs/profile-bigquery /reference/warehouse-profiles/bigquery-profile 302 +/docs/profile-mssql /reference/warehouse-profiles/mssql-profile 302 +/docs/profile-postgres /reference/warehouse-profiles/postgres-profile 302 +/docs/profile-presto /reference/warehouse-profiles/presto-profile 302 +/docs/profile-redshift /reference/warehouse-profiles/redshift-profile 302 +/docs/profile-snowflake /reference/warehouse-profiles/snowflake-profile 302 +/docs/profile-spark /reference/warehouse-profiles/spark-profile 302 +/docs/redshift-configs /reference/resource-configs/redshift-configs 302 +/docs/spark-configs /reference/resource-configs/spark-configs 302 +/docs/redshift-v2 /reference/warehouse-profiles/redshift-profile 302 +/docs/ref /docs/writing-code-in-dbt/jinja-context/ref 302 +/docs/requiring-specific-dbt-versions /docs/building-a-dbt-project/dbt-projects/requiring-specific-dbt-versions 302 +/docs/return /docs/writing-code-in-dbt/jinja-context/return 302 +/docs/rpc /reference/commands/rpc 302 +/docs/run /reference/commands/run 302 +/docs/run-operation /reference/commands/run-operation 302 +/docs/run_query /docs/writing-code-in-dbt/jinja-context/run_query 302 +/docs/run_started_at /docs/writing-code-in-dbt/jinja-context/run_started_at 302 +/docs/running-a-dbt-project/command-line-interface /reference/dbt-commands 302 +/docs/running-a-dbt-project/command-line-interface/clean /reference/commands/clean 302 +/docs/running-a-dbt-project/command-line-interface/cmd-docs /reference/commands/cmd-docs 302 +/docs/running-a-dbt-project/command-line-interface/compile /reference/commands/compile 302 +/docs/running-a-dbt-project/command-line-interface/debug /reference/commands/debug 302 +/docs/running-a-dbt-project/command-line-interface/deps /reference/commands/deps 302 +/docs/running-a-dbt-project/command-line-interface/exit-codes /reference/exit-codes 302 +/docs/running-a-dbt-project/command-line-interface/global-cli-flags /reference/global-cli-flags 302 +/docs/running-a-dbt-project/command-line-interface/init /reference/commands/init 302 +/docs/running-a-dbt-project/command-line-interface/list /reference/commands/list 302 +/docs/running-a-dbt-project/command-line-interface/model-selection-syntax /reference/model-selection-syntax 302 +/docs/running-a-dbt-project/command-line-interface/rpc /reference/commands/rpc 302 +/docs/running-a-dbt-project/command-line-interface/run /reference/commands/run 302 +/docs/running-a-dbt-project/command-line-interface/run-operation /reference/commands/run-operation 302 
+/docs/running-a-dbt-project/command-line-interface/seed /reference/commands/seed 302 +/docs/running-a-dbt-project/command-line-interface/snapshot /reference/commands/snapshot 302 +/docs/running-a-dbt-project/command-line-interface/source /reference/commands/source 302 +/docs/running-a-dbt-project/command-line-interface/test /reference/commands/test 302 +/docs/running-a-dbt-project/command-line-interface/version /reference/global-cli-flags#version 302 +/docs/running-a-dbt-project/using-the-command-line-interface /docs/running-a-dbt-project/using-the-cli 302 +/docs/running-a-dbt-project/using-the-command-line-interface/centos /dbt-cli/installation-guides/centos 302 +/docs/running-a-dbt-project/using-the-command-line-interface/configure-your-profile /dbt-cli/configure-your-profile 302 +/docs/running-a-dbt-project/using-the-command-line-interface/install-from-source /dbt-cli/installation-guides/install-from-source 302 +/docs/running-a-dbt-project/using-the-command-line-interface/installation /dbt-cli/installation 302 +/docs/running-a-dbt-project/using-the-command-line-interface/macos /dbt-cli/installation-guides/macos 302 +/docs/running-a-dbt-project/using-the-command-line-interface/ubuntu-debian /dbt-cli/installation-guides/ubuntu-debian 302 +/docs/running-a-dbt-project/using-the-command-line-interface/windows /dbt-cli/installation-guides/windows 302 +/docs/running-dbt-in-production /docs/running-a-dbt-project/running-dbt-in-production 302 +/docs/schema /docs/writing-code-in-dbt/jinja-context/schema 302 +/docs/schemas /docs/writing-code-in-dbt/jinja-context/schemas 302 +/docs/schemayml-files /reference/declaring-properties 302 +/docs/seed /reference/commands/seed 302 +/docs/seeds /docs/building-a-dbt-project/seeds 302 +/docs/setting-up-enterprise-sso-with-azure-active-directory /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory 302 +/docs/setting-up-snowflake-sso /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-snowflake-oauth 302 +/docs/setting-up-sso-with-google-gsuite /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite 302 +/docs/setting-up-sso-with-okta /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-okta 302 +/docs/slack-rules-of-the-road /docs/contributing/slack-rules-of-the-road 302 +/docs/snapshot /reference/commands/snapshot 302 +/docs/snapshots /docs/building-a-dbt-project/snapshots 302 +/docs/snowflake-configs /reference/resource-configs/snowflake-configs 302 +/docs/source /reference/commands/source 302 +/docs/statement-blocks /docs/writing-code-in-dbt/jinja-context/statement-blocks 302 +/docs/supported-databases/profile-bigquery /reference/bigquery-profile 302 +/docs/supported-databases/profile-mssql /reference/mssql-profile 302 +/docs/supported-databases/profile-postgres /reference/postgres-profile 302 +/docs/supported-databases/profile-presto /reference/presto-profile 302 +/docs/supported-databases/profile-redshift /reference/redshift-profile 302 +/docs/supported-databases/profile-snowflake /reference/snowflake-profile 302 +/docs/supported-databases/profile-spark /reference/spark-profile 302 +/docs/tags /reference/resource-configs/tags 302 +/docs/target /docs/writing-code-in-dbt/jinja-context/target 302 +/docs/test /reference/commands/test 302 +/docs/testing /docs/building-a-dbt-project/tests 302 +/docs/testing-and-documentation /docs/building-a-dbt-project/tests 302 +/docs/the-dbt-ide /docs/dbt-cloud/cloud-ide/the-dbt-ide 302 +/docs/this /docs/writing-code-in-dbt/jinja-context/this 302 +/docs/tojson 
/docs/writing-code-in-dbt/jinja-context/tojson 302 +/docs/ubuntu-debian /dbt-cli/installation 302 +/docs/use-an-existing-project /docs/building-a-dbt-project/dbt-projects/use-an-existing-project 302 +/docs/using-custom-aliases /docs/building-a-dbt-project/building-models/using-custom-aliases 302 +/docs/using-custom-database /docs/building-a-dbt-project/building-models/using-custom-databases 302 +/docs/using-custom-schemas /docs/building-a-dbt-project/building-models/using-custom-schemas 302 +/docs/using-dbt-cloud /docs/dbt-cloud/using-dbt-cloud 302 +/docs/using-jinja /guides/getting-started/learning-more/using-jinja 302 +/docs/using-operations /docs/building-a-dbt-project/hooks-operations 302 +/docs/using-sources /docs/building-a-dbt-project/using-sources 302 +/docs/using-sql-headers /reference/resource-configs/sql_header 302 +/docs/using-the-command-line-interface /docs/running-a-dbt-project/using-the-cli 302 +/docs/using-the-dbt-ide /docs/running-a-dbt-project/using-the-dbt-ide 302 +/docs/using-variables /docs/building-a-dbt-project/building-models/using-variables 302 +/docs/var /docs/writing-code-in-dbt/jinja-context/var 302 +/docs/version /reference/global-cli-flags#version 302 +/docs/videos /guides/legacy/videos 302 +/docs/windows /dbt-cli/installation 302 +/docs/writing-code-in-dbt/class-reference /reference/dbt-classes 302 +/docs/writing-code-in-dbt/extending-dbts-programming-environment/creating-new-materializations /guides/legacy/creating-new-materializations 302 +/docs/writing-code-in-dbt/extending-dbts-programming-environment/custom-schema-tests /guides/legacy/writing-custom-schema-tests 302 +/docs/writing-code-in-dbt/getting-started-with-jinja /docs/building-a-dbt-project/jinja-macros 302 +/docs/writing-code-in-dbt/jinja-context/adapter /reference/dbt-jinja-functions/adapter 302 +/docs/writing-code-in-dbt/jinja-context/as_text /reference/dbt-jinja-functions/as_text 302 +/docs/writing-code-in-dbt/jinja-context/builtins /reference/dbt-jinja-functions/builtins 302 +/docs/writing-code-in-dbt/jinja-context/config /reference/dbt-jinja-functions/config 302 +/docs/writing-code-in-dbt/jinja-context/dbt-project-yml-context /reference/dbt-jinja-functions/dbt-project-yml-context 302 +/docs/writing-code-in-dbt/jinja-context/dbt_version /reference/dbt-jinja-functions/dbt_version 302 +/docs/writing-code-in-dbt/jinja-context/debug-method /reference/dbt-jinja-functions/debug-method 302 +/docs/writing-code-in-dbt/jinja-context/doc /reference/dbt-jinja-functions/doc 302 +/docs/writing-code-in-dbt/jinja-context/env_var /reference/dbt-jinja-functions/env_var 302 +/docs/writing-code-in-dbt/jinja-context/exceptions /reference/dbt-jinja-functions/exceptions 302 +/docs/writing-code-in-dbt/jinja-context/execute /reference/dbt-jinja-functions/execute 302 +/docs/writing-code-in-dbt/jinja-context/flags /reference/dbt-jinja-functions/flags 302 +/docs/writing-code-in-dbt/jinja-context/fromjson /reference/dbt-jinja-functions/fromjson 302 +/docs/writing-code-in-dbt/jinja-context/fromyaml /reference/dbt-jinja-functions/fromyaml 302 +/docs/writing-code-in-dbt/jinja-context/graph /reference/dbt-jinja-functions/graph 302 +/docs/writing-code-in-dbt/jinja-context/invocation_id /reference/dbt-jinja-functions/invocation_id 302 +/docs/writing-code-in-dbt/jinja-context/log /reference/dbt-jinja-functions/log 302 +/docs/writing-code-in-dbt/jinja-context/modules /reference/dbt-jinja-functions/modules 302 +/docs/writing-code-in-dbt/jinja-context/on-run-end-context /reference/dbt-jinja-functions/on-run-end-context 302 
+/docs/writing-code-in-dbt/jinja-context/profiles-yml-context /reference/dbt-jinja-functions/profiles-yml-context 302 +/docs/writing-code-in-dbt/jinja-context/project_name /reference/dbt-jinja-functions/project_name 302 +/docs/writing-code-in-dbt/jinja-context/ref /reference/dbt-jinja-functions/ref 302 +/docs/writing-code-in-dbt/jinja-context/return /reference/dbt-jinja-functions/return 302 +/docs/writing-code-in-dbt/jinja-context/run_query /reference/dbt-jinja-functions/run_query 302 +/docs/writing-code-in-dbt/jinja-context/run_started_at /reference/dbt-jinja-functions/run_started_at 302 +/docs/writing-code-in-dbt/jinja-context/schema /reference/dbt-jinja-functions/schema 302 +/docs/writing-code-in-dbt/jinja-context/schemas /reference/dbt-jinja-functions/schemas 302 +/docs/writing-code-in-dbt/jinja-context/source /reference/dbt-jinja-functions/source 302 +/docs/writing-code-in-dbt/jinja-context/statement-blocks /reference/dbt-jinja-functions/statement-blocks 302 +/docs/writing-code-in-dbt/jinja-context/target /reference/dbt-jinja-functions/target 302 +/docs/writing-code-in-dbt/jinja-context/this /reference/dbt-jinja-functions/this 302 +/docs/writing-code-in-dbt/jinja-context/tojson /reference/dbt-jinja-functions/tojson 302 +/docs/writing-code-in-dbt/jinja-context/toyaml /reference/dbt-jinja-functions/toyaml 302 +/docs/writing-code-in-dbt/jinja-context/var /reference/dbt-jinja-functions/var 302 +/docs/writing-code-in-dbt/macros /docs/building-a-dbt-project/jinja-macros 302 +/docs/writing-code-in-dbt/using-jinja /guides/getting-started/learning-more/using-jinja 302 +/faqs/getting-help/ /guides/legacy/getting-help 302 +/migration-guide/upgrading-to-0-17-0 /guides/migration/versions 302 +/migration-guide/upgrading-to-0-18-0 /guides/migration/versions 302 +/reference/accounts /dbt-cloud/api 302 +/reference/api /dbt-cloud/api 302 +/reference/connections /dbt-cloud/api 302 +/reference/data-test-configs /reference/test-configs 302 +/reference/declaring-properties /reference/configs-and-properties 302 +/reference/dbt-artifacts /reference/artifacts/dbt-artifacts 302 +/reference/environments /dbt-cloud/api 302 +/reference/events /reference/events-logging 302 +/reference/jobs /dbt-cloud/api 302 +/reference/model-selection-syntax /reference/node-selection/syntax 302 +/reference/project-configs/on-run-end /reference/project-configs/on-run-start-on-run-end 302 +/reference/project-configs/on-run-start /reference/project-configs/on-run-start-on-run-end 302 +/reference/repositories /dbt-cloud/api 302 +/reference/resource-configs/post-hook /reference/resource-configs/pre-hook-post-hook 302 +/reference/resource-configs/pre-hook /reference/resource-configs/pre-hook-post-hook 302 +/reference/resource-properties/tags /reference/resource-configs/tags 302 +/reference/runs /dbt-cloud/api 302 +/reference/using-the-dbt-cloud-api /dbt-cloud/api 302 +https://tutorial.getdbt.com/* https://docs.getdbt.com/:splat 301! 
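# The `https://tutorial.getdbt.com/*` rule above uses Netlify's splat syntax: a trailing `/*` on the
# source matches everything under that path, `:splat` re-inserts the matched remainder into the
# destination, and `!` after the status code (as in `301!`) forces the redirect even when content
# exists at the source path.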
+/reference/model-selection-syntax/#test-selection-examples /reference/node-selection/test-selection-examples 302 +/docs/building-a-dbt-project/building-models/using-custom-database /docs/building-a-dbt-project/building-models/using-custom-databases 302 +/dbt-cloud/api /dbt-cloud/api-v2 302 +/reference/project-configs/source-paths /reference/project-configs/model-paths 302 +/reference/project-configs/data-paths /reference/project-configs/seed-paths 302 +/reference/project-configs/modules-paths /reference/project-configs/packages-install-path 302 +/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications /docs/dbt-cloud/using-dbt-cloud/cloud-notifications 302 +/reference/warehouse-profiles/presto-profile /reference/profiles.yml 302 +/setting-up /guides/getting-started/getting-set-up/setting-up-bigquery 302 +/tutorial/setting-up /guides/getting-started 302 +/tutorial/test-and-document-your-project /guides/getting-started/building-your-first-project/test-and-document-your-project 302 +/tutorial/build-your-first-models /guides/getting-started/building-your-first-project/build-your-first-models 302 +/tutorial/deploy-your-project /guides/getting-started/building-your-first-project/schedule-a-job 302 +/tutorial/using-jinja /guides/getting-started/learning-more/using-jinja 302 +/tutorial/refactoring-legacy-sql /guides/getting-started/learning-more/refactoring-legacy-sql 302 +/tutorial/2b-create-a-project-dbt-cli.md /guides/getting-started/learning-more/getting-started-dbt-core 302 +/tutorial/create-a-project-dbt-cli /guides/getting-started/learning-more/getting-started-dbt-core 302 +/tutorial/2a-create-a-project-dbt-cloud.md /guides/getting-started 302 +/tutorial/create-a-project-dbt-cloud /guides/getting-started 302 +/tutorial/getting-started /guides/getting-started 302 +/docs/dbt-cloud/cloud-changelog /docs/dbt-cloud/release-notes 302 +/faqs/all /docs/faqs 301! +/faqs/* /docs/faqs/:splat 301 +/tutorial/learning-more/* /guides/getting-started/learning-more/:splat 301 +/tutorial/getting-set-up/* /guides/getting-started/getting-set-up/:splat 301 +/tutorial/building-your-first-project/* /guides/getting-started/building-your-first-project/:splat 301 +/tutorial/refactoring-legacy-sql /guides/getting-started/learning-more/refactoring-legacy-sql 302 +/blog/change-data-capture-metrics /blog/change-data-capture 301 +/blog/model-timing-tab /blog/how-we-shaved-90-minutes-off-model 301 -/img/docs/dbt-cloud/dbt-cloud-enterprise/icon.png https://www.getdbt.com/ui/img/dbt-icon.png 301!
-/dbt-cli/installation-guides/centos /dbt-cli/install/overview 302 -/dbt-cli/installation-guides/centos /dbt-cli/install/overview 302 -/dbt-cli/installation-guides/install-from-source /dbt-cli/install/from-source 302 -/dbt-cli/installation-guides/macos /dbt-cli/install/overview 302 -/dbt-cli/installation-guides/ubuntu-debian /dbt-cli/install/overview 302 -/dbt-cli/installation-guides/windows /dbt-cli/install/overview 302 -/dbt-cli/installation /dbt-cli/install/overview 302 -/docs /docs/introduction 302 -/docs/adapter /docs/writing-code-in-dbt/jinja-context/adapter 302 -/docs/analyses /docs/building-a-dbt-project/analyses 302 -/docs/api-variable /docs/writing-code-in-dbt/api-variable 302 -/docs/archival /docs/building-a-dbt-project/archival 302 -/docs/artifacts /docs/dbt-cloud/using-dbt-cloud/artifacts 302 -/docs/best-practices /docs/guides/best-practices 302 -/docs/bigquery-configs /reference/resource-configs/bigquery-configs 302 -/docs/building-a-dbt-project/building-models/bigquery-configs /reference/resource-configs/bigquery-configs 302 -/docs/building-a-dbt-project/building-models/configuring-models /reference/model-configs -/docs/building-a-dbt-project/building-models/enable-and-disable-models /reference/resource-configs/enabled 302 -/docs/building-a-dbt-project/building-models/redshift-configs /reference/resource-configs/redshift-configs 302 -/docs/building-a-dbt-project/building-models/snowflake-configs /reference/resource-configs/snowflake-configs 302 -/docs/building-a-dbt-project/building-models/spark-configs /reference/resource-configs/spark-configs 302 -/docs/building-a-dbt-project/building-models/tags /reference/resource-configs/tags 302 -/docs/building-a-dbt-project/building-models/using-sql-headers /reference/resource-configs/sql_header 302 -/docs/building-a-dbt-project/dbt-projects /docs/building-a-dbt-project/projects 302 -/docs/building-a-dbt-project/dbt-projects/configuring-query-comments /reference/project-configs/query-comment 302 -/docs/building-a-dbt-project/dbt-projects/configuring-quoting /reference/project-configs/quoting 302 -/docs/building-a-dbt-project/dbt-projects/creating-a-project /docs/building-a-dbt-project/projects#creating-a-dbt-project 302 -/docs/building-a-dbt-project/dbt-projects/requiring-specific-dbt-versions /reference/project-configs/require-dbt-version 302 -/docs/building-a-dbt-project/dbt-projects/use-an-existing-project /docs/building-a-dbt-project/projects#using-an-existing-project 302 -/docs/building-a-dbt-project/hooks /docs/building-a-dbt-project/hooks-operations 302 -/docs/building-a-dbt-project/testing-and-documentation /docs/building-a-dbt-project/tests 302 -/docs/building-a-dbt-project/testing-and-documentation/documentation /docs/building-a-dbt-project/testing-and-documentation/documentation 302 -/docs/building-a-dbt-project/testing-and-documentation/documentation-website /docs/building-a-dbt-project/testing-and-documentation/documentation 302 -/docs/building-a-dbt-project/testing-and-documentation/schemayml-files /reference/declaring-properties 302 -/docs/building-a-dbt-project/testing-and-documentation/testing /docs/building-a-dbt-project/tests 302 -/docs/building-a-dbt-project/using-operations /docs/building-a-dbt-project/hooks-operations 302 -/docs/building-a-new-adapter /docs/contributing/building-a-new-adapter 302 -/docs/building-models /docs/building-a-dbt-project/building-models 302 -/docs/building-packages /docs/guides/building-packages 302 -/docs/centos /dbt-cli/installation 302 -/docs/clean /reference/commands/clean 302 
-/docs/cloud-choosing-a-dbt-version /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version 302 -/docs/cloud-configuring-dbt-cloud /docs/dbt-cloud/cloud-configuring-dbt-cloud 302 -/docs/cloud-enabling-continuous-integration-with-github /docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github 302 -/docs/cloud-generating-documentation /docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation 302 -/docs/cloud-import-a-project-by-git-url /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url 302 -/docs/cloud-installing-the-github-application /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application 302 -/docs/cloud-managing-permissions /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-managing-permissions 302 -/docs/cloud-overview /docs/dbt-cloud/cloud-overview 302 -/docs/cloud-quickstart /docs/dbt-cloud/cloud-quickstart 302 -/docs/cloud-seats-and-users /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-seats-and-users 302 -/docs/cloud-setting-a-custom-target-name /docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name 302 -/docs/cloud-snapshotting-source-freshness /docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness 302 -/docs/cloud-supported-dbt-versions /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version 302 -/docs/cloud-using-a-custom-cron-schedule /docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule 302 -/docs/cloud-using-a-managed-repository /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository 302 -/docs/cmd-docs /reference/commands/cmd-docs 302 -/docs/command-line-interface /reference/dbt-commands 302 -/docs/compile /reference/commands/compile 302 -/docs/config /docs/writing-code-in-dbt/jinja-context/config 302 -/docs/configure-your-profile /dbt-cli/configure-your-profile 302 -/docs/configuring-incremental-models /docs/building-a-dbt-project/building-models/configuring-incremental-models 302 -/docs/configuring-models /reference/model-configs 302 -/docs/configuring-query-comments /docs/building-a-dbt-project/dbt-projects/configuring-query-comments 302 -/docs/configuring-quoting /docs/building-a-dbt-project/dbt-projects/configuring-quoting 302 -/docs/configuring-resources-from-the-project-file /docs/building-a-dbt-project/dbt-projects/configuring-resources-from-the-project-file 302 -/docs/connecting-your-database /docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database 302 -/docs/contributor-license-agreements /docs/contributing/contributor-license-agreements 302 -/docs/creating-a-project /docs/building-a-dbt-project/dbt-projects/creating-a-project 302 -/docs/creating-new-materializations /docs/guides/creating-new-materializations 302 -/docs/custom-schema-tests /docs/guides/writing-custom-schema-tests 302 -/docs/dbt-api /docs/running-a-dbt-project/dbt-api 302 -/docs/dbt-cloud-enterprise /docs/dbt-cloud/dbt-cloud-enterprise 302 -/docs/dbt-cloud/cloud-configuring-repositories /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories 302 -/docs/dbt-cloud/dbt-cloud-enterprise/enterprise-permissions/ /docs/dbt-cloud/access-control/enterprise-permissions 302 -/docs/dbt-cloud/on-premises/architecture /dbt-cloud/on-premises/dependencies 302 -/docs/dbt-projects /docs/building-a-dbt-project/dbt-projects 302 -/docs/dbt_projectyml-file /docs/building-a-dbt-project/dbt-projects/dbt_projectyml-file 302 -/docs/debug /reference/commands/debug 302 -/docs/debug-method 
/docs/writing-code-in-dbt/jinja-context/debug-method 302 -/docs/deps /reference/commands/deps 302 -/docs/doc /docs/writing-code-in-dbt/jinja-context/doc 302 -/docs/documentation /docs/building-a-dbt-project/documentation 302 -/docs/documentation-website /docs/building-a-dbt-project/documentation 302 -/docs/dont-nest-your-curlies /docs/building-a-dbt-project/dont-nest-your-curlies 302 -/docs/enable-and-disable-models /reference/resource-configs/enabled 302 -/docs/enterprise-permissions /docs/dbt-cloud/dbt-cloud-enterprise/enterprise-permissions 302 -/docs/env_var /docs/writing-code-in-dbt/jinja-context/env_var 302 -/docs/exceptions /docs/writing-code-in-dbt/jinja-context/exceptions 302 -/docs/execute /docs/writing-code-in-dbt/jinja-context/execute 302 -/docs/exit-codes /reference/exit-codes 302 -/docs/flags /docs/writing-code-in-dbt/jinja-context/flags 302 -/docs/fromjson /docs/writing-code-in-dbt/jinja-context/fromjson 302 -/docs/getting-started-with-jinja /docs/building-a-dbt-project/jinja-macros 302 -/docs/global-cli-flags /reference/global-cli-flags 302 -/docs/graph /docs/writing-code-in-dbt/jinja-context/graph 302 -/docs/guides/migration-guide/upgrading-to-014 /docs/guides/migration-guide/upgrading-to-0-14-0 302 -/docs/guides/migration-guide/upgrading-from-0-10-to-0-11 /docs/guides/migration-guide/upgrading-to-0-11-0 302 -/docs/guides/migration-guide/upgrading-to-0-20-0 /docs/guides/migration-guide/upgrading-to-v0.20 302 -/docs/guides/migration-guide/upgrading-to-0-21-0 /docs/guides/migration-guide/upgrading-to-v0.21 302 -/docs/guides/migration-guide/upgrading-to-1-0-0 /docs/guides/migration-guide/upgrading-to-v1.0 302 -/docs/guides/writing-custom-schema-tests /docs/guides/writing-custom-generic-tests -/docs/hooks /docs/building-a-dbt-project/hooks-operations 302 -/docs/init /reference/commands/init 302 -/docs/install-from-source /dbt-cli/installation 302 -/docs/installation /dbt-cli/installation 302 -/docs/invocation_id /docs/writing-code-in-dbt/jinja-context/invocation_id 302 -/docs/jinja-context /docs/writing-code-in-dbt/jinja-context 302 -/docs/license /docs/about/license 302 -/docs/list /reference/commands/list 302 -/docs/log /docs/writing-code-in-dbt/jinja-context/log 302 -/docs/macos /dbt-cli/installation 302 -/docs/macros /docs/building-a-dbt-project/macros 302 -/docs/managing-environments /docs/guides/managing-environments 302 -/docs/materializations /docs/building-a-dbt-project/building-models/materializations 302 -/docs/model-selection-syntax /reference/node-selection/syntax 302 -/docs/modules /docs/writing-code-in-dbt/jinja-context/modules 302 -/docs/on-run-end-context /docs/writing-code-in-dbt/jinja-context/on-run-end-context 302 -/docs/overview /docs/introduction 302 -/docs/package-management /docs/building-a-dbt-project/package-management 302 -/docs/profile /docs/available-adapters 302 -/docs/profile-bigquery /reference/warehouse-profiles/bigquery-profile 302 -/docs/profile-mssql /reference/warehouse-profiles/mssql-profile 302 -/docs/profile-postgres /reference/warehouse-profiles/postgres-profile 302 -/docs/profile-presto /reference/warehouse-profiles/presto-profile 302 -/docs/profile-redshift /reference/warehouse-profiles/redshift-profile 302 -/docs/profile-snowflake /reference/warehouse-profiles/snowflake-profile 302 -/docs/profile-spark /reference/warehouse-profiles/spark-profile 302 -/docs/redshift-configs /reference/resource-configs/redshift-configs 302 -/docs/spark-configs /reference/resource-configs/spark-configs 302 -/docs/redshift-v2 
/reference/warehouse-profiles/redshift-profile 302 -/docs/ref /docs/writing-code-in-dbt/jinja-context/ref 302 -/docs/requiring-specific-dbt-versions /docs/building-a-dbt-project/dbt-projects/requiring-specific-dbt-versions 302 -/docs/return /docs/writing-code-in-dbt/jinja-context/return 302 -/docs/rpc /reference/commands/rpc 302 -/docs/run /reference/commands/run 302 -/docs/run-operation /reference/commands/run-operation 302 -/docs/run_query /docs/writing-code-in-dbt/jinja-context/run_query 302 -/docs/run_started_at /docs/writing-code-in-dbt/jinja-context/run_started_at 302 -/docs/running-a-dbt-project/command-line-interface /reference/dbt-commands 302 -/docs/running-a-dbt-project/command-line-interface/clean /reference/commands/clean 302 -/docs/running-a-dbt-project/command-line-interface/cmd-docs /reference/commands/cmd-docs 302 -/docs/running-a-dbt-project/command-line-interface/compile /reference/commands/compile 302 -/docs/running-a-dbt-project/command-line-interface/debug /reference/commands/debug 302 -/docs/running-a-dbt-project/command-line-interface/deps /reference/commands/deps 302 -/docs/running-a-dbt-project/command-line-interface/exit-codes /reference/exit-codes 302 -/docs/running-a-dbt-project/command-line-interface/global-cli-flags /reference/global-cli-flags 302 -/docs/running-a-dbt-project/command-line-interface/init /reference/commands/init 302 -/docs/running-a-dbt-project/command-line-interface/list /reference/commands/list 302 -/docs/running-a-dbt-project/command-line-interface/model-selection-syntax /reference/model-selection-syntax 302 -/docs/running-a-dbt-project/command-line-interface/rpc /reference/commands/rpc 302 -/docs/running-a-dbt-project/command-line-interface/run /reference/commands/run 302 -/docs/running-a-dbt-project/command-line-interface/run-operation /reference/commands/run-operation 302 -/docs/running-a-dbt-project/command-line-interface/seed /reference/commands/seed 302 -/docs/running-a-dbt-project/command-line-interface/snapshot /reference/commands/snapshot 302 -/docs/running-a-dbt-project/command-line-interface/source /reference/commands/source 302 -/docs/running-a-dbt-project/command-line-interface/test /reference/commands/test 302 -/docs/running-a-dbt-project/command-line-interface/version /reference/global-cli-flags#version 302 -/docs/running-a-dbt-project/using-the-command-line-interface /docs/running-a-dbt-project/using-the-cli 302 -/docs/running-a-dbt-project/using-the-command-line-interface/centos /dbt-cli/installation-guides/centos 302 -/docs/running-a-dbt-project/using-the-command-line-interface/configure-your-profile /dbt-cli/configure-your-profile 302 -/docs/running-a-dbt-project/using-the-command-line-interface/install-from-source /dbt-cli/installation-guides/install-from-source 302 -/docs/running-a-dbt-project/using-the-command-line-interface/installation /dbt-cli/installation 302 -/docs/running-a-dbt-project/using-the-command-line-interface/macos /dbt-cli/installation-guides/macos 302 -/docs/running-a-dbt-project/using-the-command-line-interface/ubuntu-debian /dbt-cli/installation-guides/ubuntu-debian 302 -/docs/running-a-dbt-project/using-the-command-line-interface/windows /dbt-cli/installation-guides/windows 302 -/docs/running-dbt-in-production /docs/running-a-dbt-project/running-dbt-in-production 302 -/docs/schema /docs/writing-code-in-dbt/jinja-context/schema 302 -/docs/schemas /docs/writing-code-in-dbt/jinja-context/schemas 302 -/docs/schemayml-files /reference/declaring-properties 302 -/docs/seed /reference/commands/seed 302 
-/docs/seeds /docs/building-a-dbt-project/seeds 302 -/docs/setting-up-enterprise-sso-with-azure-active-directory /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory 302 -/docs/setting-up-snowflake-sso /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-snowflake-oauth 302 -/docs/setting-up-sso-with-google-gsuite /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite 302 -/docs/setting-up-sso-with-okta /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-okta 302 -/docs/slack-rules-of-the-road /docs/contributing/slack-rules-of-the-road 302 -/docs/snapshot /reference/commands/snapshot 302 -/docs/snapshots /docs/building-a-dbt-project/snapshots 302 -/docs/snowflake-configs /reference/resource-configs/snowflake-configs 302 -/docs/source /reference/commands/source 302 -/docs/statement-blocks /docs/writing-code-in-dbt/jinja-context/statement-blocks 302 -/docs/supported-databases /docs/available-adapters 302 -/docs/supported-databases/profile-bigquery /reference/bigquery-profile 302 -/docs/supported-databases/profile-mssql /reference/mssql-profile 302 -/docs/supported-databases/profile-postgres /reference/postgres-profile 302 -/docs/supported-databases/profile-presto /reference/presto-profile 302 -/docs/supported-databases/profile-redshift /reference/redshift-profile 302 -/docs/supported-databases/profile-snowflake /reference/snowflake-profile 302 -/docs/supported-databases/profile-spark /reference/spark-profile 302 -/docs/tags /reference/resource-configs/tags 302 -/docs/target /docs/writing-code-in-dbt/jinja-context/target 302 -/docs/test /reference/commands/test 302 -/docs/testing /docs/building-a-dbt-project/tests 302 -/docs/testing-and-documentation /docs/building-a-dbt-project/tests 302 -/docs/the-dbt-ide /docs/dbt-cloud/cloud-ide/the-dbt-ide 302 -/docs/this /docs/writing-code-in-dbt/jinja-context/this 302 -/docs/tojson /docs/writing-code-in-dbt/jinja-context/tojson 302 -/docs/ubuntu-debian /dbt-cli/installation 302 -/docs/upgrading-to-0-14-1 /docs/guides/migration-guide/upgrading-to-0-14-1 302 -/docs/upgrading-to-0-16-0 /docs/guides/migration-guide/upgrading-to-0-16-0 302 -/docs/upgrading-to-014 /docs/guides/migration-guide/upgrading-to-014 302 -/docs/use-an-existing-project /docs/building-a-dbt-project/dbt-projects/use-an-existing-project 302 -/docs/using-custom-aliases /docs/building-a-dbt-project/building-models/using-custom-aliases 302 -/docs/using-custom-database /docs/building-a-dbt-project/building-models/using-custom-databases 302 -/docs/using-custom-schemas /docs/building-a-dbt-project/building-models/using-custom-schemas 302 -/docs/using-dbt-cloud /docs/dbt-cloud/using-dbt-cloud 302 -/docs/using-jinja /tutorial/using-jinja 302 -/docs/using-operations /docs/building-a-dbt-project/hooks-operations 302 -/docs/using-sources /docs/building-a-dbt-project/using-sources 302 -/docs/using-sql-headers /reference/resource-configs/sql_header 302 -/docs/using-the-command-line-interface /docs/running-a-dbt-project/using-the-cli 302 -/docs/using-the-dbt-ide /docs/running-a-dbt-project/using-the-dbt-ide 302 -/docs/using-variables /docs/building-a-dbt-project/building-models/using-variables 302 -/docs/var /docs/writing-code-in-dbt/jinja-context/var 302 -/docs/version /reference/global-cli-flags#version 302 -/docs/videos /docs/guides/videos 302 -/docs/viewpoint /docs/about/viewpoint 302 -/docs/windows /dbt-cli/installation 302 -/docs/writing-code-in-dbt/class-reference /reference/dbt-classes 302 
-/docs/writing-code-in-dbt/extending-dbts-programming-environment/creating-new-materializations /docs/guides/creating-new-materializations 302 -/docs/writing-code-in-dbt/extending-dbts-programming-environment/custom-schema-tests /docs/guides/writing-custom-schema-tests 302 -/docs/writing-code-in-dbt/getting-started-with-jinja /docs/building-a-dbt-project/jinja-macros 302 -/docs/writing-code-in-dbt/jinja-context/adapter /reference/dbt-jinja-functions/adapter 302 -/docs/writing-code-in-dbt/jinja-context/as_text /reference/dbt-jinja-functions/as_text 302 -/docs/writing-code-in-dbt/jinja-context/builtins /reference/dbt-jinja-functions/builtins 302 -/docs/writing-code-in-dbt/jinja-context/config /reference/dbt-jinja-functions/config 302 -/docs/writing-code-in-dbt/jinja-context/dbt-project-yml-context /reference/dbt-jinja-functions/dbt-project-yml-context 302 -/docs/writing-code-in-dbt/jinja-context/dbt_version /reference/dbt-jinja-functions/dbt_version 302 -/docs/writing-code-in-dbt/jinja-context/debug-method /reference/dbt-jinja-functions/debug-method 302 -/docs/writing-code-in-dbt/jinja-context/doc /reference/dbt-jinja-functions/doc 302 -/docs/writing-code-in-dbt/jinja-context/env_var /reference/dbt-jinja-functions/env_var 302 -/docs/writing-code-in-dbt/jinja-context/exceptions /reference/dbt-jinja-functions/exceptions 302 -/docs/writing-code-in-dbt/jinja-context/execute /reference/dbt-jinja-functions/execute 302 -/docs/writing-code-in-dbt/jinja-context/flags /reference/dbt-jinja-functions/flags 302 -/docs/writing-code-in-dbt/jinja-context/fromjson /reference/dbt-jinja-functions/fromjson 302 -/docs/writing-code-in-dbt/jinja-context/fromyaml /reference/dbt-jinja-functions/fromyaml 302 -/docs/writing-code-in-dbt/jinja-context/graph /reference/dbt-jinja-functions/graph 302 -/docs/writing-code-in-dbt/jinja-context/invocation_id /reference/dbt-jinja-functions/invocation_id 302 -/docs/writing-code-in-dbt/jinja-context/log /reference/dbt-jinja-functions/log 302 -/docs/writing-code-in-dbt/jinja-context/modules /reference/dbt-jinja-functions/modules 302 -/docs/writing-code-in-dbt/jinja-context/on-run-end-context /reference/dbt-jinja-functions/on-run-end-context 302 -/docs/writing-code-in-dbt/jinja-context/profiles-yml-context /reference/dbt-jinja-functions/profiles-yml-context 302 -/docs/writing-code-in-dbt/jinja-context/project_name /reference/dbt-jinja-functions/project_name 302 -/docs/writing-code-in-dbt/jinja-context/ref /reference/dbt-jinja-functions/ref 302 -/docs/writing-code-in-dbt/jinja-context/return /reference/dbt-jinja-functions/return 302 -/docs/writing-code-in-dbt/jinja-context/run_query /reference/dbt-jinja-functions/run_query 302 -/docs/writing-code-in-dbt/jinja-context/run_started_at /reference/dbt-jinja-functions/run_started_at 302 -/docs/writing-code-in-dbt/jinja-context/schema /reference/dbt-jinja-functions/schema 302 -/docs/writing-code-in-dbt/jinja-context/schemas /reference/dbt-jinja-functions/schemas 302 -/docs/writing-code-in-dbt/jinja-context/source /reference/dbt-jinja-functions/source 302 -/docs/writing-code-in-dbt/jinja-context/statement-blocks /reference/dbt-jinja-functions/statement-blocks 302 -/docs/writing-code-in-dbt/jinja-context/target /reference/dbt-jinja-functions/target 302 -/docs/writing-code-in-dbt/jinja-context/this /reference/dbt-jinja-functions/this 302 -/docs/writing-code-in-dbt/jinja-context/tojson /reference/dbt-jinja-functions/tojson 302 -/docs/writing-code-in-dbt/jinja-context/toyaml /reference/dbt-jinja-functions/toyaml 302 
-/docs/writing-code-in-dbt/jinja-context/var /reference/dbt-jinja-functions/var 302 -/docs/writing-code-in-dbt/macros /docs/building-a-dbt-project/jinja-macros 302 -/docs/writing-code-in-dbt/using-jinja /tutorial/using-jinja 302 -/faqs/getting-help/ /docs/guides/getting-help/ 302 -/reference/accounts /dbt-cloud/api 302 -/reference/api /dbt-cloud/api 302 -/reference/connections /dbt-cloud/api 302 -/reference/data-test-configs /reference/test-configs 302 -/reference/declaring-properties /reference/configs-and-properties 302 -/reference/dbt-artifacts /reference/artifacts/dbt-artifacts 302 -/reference/environments /dbt-cloud/api 302 -/reference/events /reference/events-logging 302 -/reference/jobs /dbt-cloud/api 302 -/reference/model-selection-syntax /reference/node-selection/syntax 302 -/reference/project-configs/on-run-end /reference/project-configs/on-run-start-on-run-end 302 -/reference/project-configs/on-run-start /reference/project-configs/on-run-start-on-run-end 302 -/reference/repositories /dbt-cloud/api 302 -/reference/resource-configs/post-hook /reference/resource-configs/pre-hook-post-hook 302 -/reference/resource-configs/pre-hook /reference/resource-configs/pre-hook-post-hook 302 -/reference/resource-properties/tags /reference/resource-configs/tags 302 -/reference/runs /dbt-cloud/api 302 -/reference/using-the-dbt-cloud-api /dbt-cloud/api 302 -https://tutorial.getdbt.com/* https://docs.getdbt.com/:splat 301! -/reference/model-selection-syntax/#test-selection-examples /reference/node-selection/test-selection-examples 302 -/docs/building-a-dbt-project/building-models/using-custom-database /docs/building-a-dbt-project/building-models/using-custom-databases 302 -/dbt-cloud/api /dbt-cloud/api-v2 302 -/reference/project-configs/source-paths /reference/project-configs/model-paths 302 -/reference/project-configs/data-paths /reference/project-configs/seed-paths 302 -/reference/project-configs/modules-paths /reference/project-configs/packages-install-path 302 -/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications /docs/dbt-cloud/using-dbt-cloud/cloud-notifications 302 -/reference/warehouse-profiles/presto-profile /reference/profiles.yml 302 -/tutorial/setting-up /tutorial/getting-started 302 -/tutorial/test-and-document-your-project /tutorial/building-your-first-project/test-and-document-your-project 302 -/tutorial/build-your-first-models /tutorial/building-your-first-project/build-your-first-models 302 -/tutorial/deploy-your-project /tutorial/building-your-first-project/schedule-a-job 302 -/tutorial/using-jinja /tutorial/learning-more/using-jinja 302 -/tutorial/refactoring-legacy-sql /tutorial/learning-more/refactoring-legacy-sql 302 -/tutorial/2b-create-a-project-dbt-cli.md /tutorial/learning-more/getting-started-dbt-core 302 -/tutorial/create-a-project-dbt-cli /tutorial/learning-more/getting-started-dbt-core 302 -/tutorial/2a-create-a-project-dbt-cloud.md /tutorial/getting-started 302 -/tutorial/create-a-project-dbt-cloud /tutorial/getting-started 302 -/docs/dbt-cloud/cloud-changelog /docs/dbt-cloud/release-notes 302 +# supported data platforms page + +/docs/profile /docs/supported-data-platforms 302 +/docs/available-adapters /docs/supported-data-platforms 302 +/docs/supported-databases /docs/supported-data-platforms 302 + +# migration and legacy guides + +/docs/guides/migration-guide/upgrading-to-0-14-0 /guides/migration/versions 302 +/docs/guides/migration-guide/upgrading-to-0-15-0 /guides/migration/versions 302 +/docs/guides/migration-guide/upgrading-to-0-16-0 
/guides/migration/versions 302 +/docs/guides/migration-guide/upgrading-to-0-17-0 /guides/migration/versions 302 +/docs/guides/migration-guide/upgrading-to-0-18-0 /guides/migration/versions 302 +/docs/guides/migration-guide/upgrading-to-0-19-0 /guides/migration/versions 302 +/docs/guides/migration-guide/upgrading-from-0-10-to-0-11 /guides/migration/versions 302 +/docs/guides/migration-guide/upgrading-to-014 /guides/migration/versions 302 +/docs/upgrading-to-014 /guides/migration/versions 302 +/docs/upgrading-to-0-14-1 /guides/migration/versions 302 +/docs/upgrading-to-0-16-0 /guides/migration/versions 302 +/docs/guides/migration-guide/upgrading-to-0-20-0 /guides/migration/versions/upgrading-to-v0.20 302 +/docs/guides/migration-guide/upgrading-to-0-21-0 /guides/migration/versions/upgrading-to-v0.21 302 +/docs/guides/migration-guide/upgrading-to-1-0-0 /guides/migration/versions/upgrading-to-v1.0 302 +/docs/guides/migration-guide/upgrading-to-v1.0 /guides/migration/versions/upgrading-to-v1.0 302 +/docs/guides/getting-help /guides/legacy/getting-help 302 +/docs/guides/migration-guide/* /guides/migration/versions/:splat 301! +/docs/guides/* /guides/legacy/:splat 301! +/docs/contributing/building-a-new-adapter /docs/contributing/adapter-development/3-building-a-new-adapter 302 +/docs/contributing/testing-a-new-adapter /docs/contributing/adapter-development/4-testing-a-new-adapter 302 +/docs/contributing/documenting-a-new-adapter /docs/contributing/adapter-development/5-documenting-a-new-adapter 302 + +/docs/dbt-cloud/using-dbt-cloud/cloud-metrics-layer /docs/use-dbt-semantic-layer/dbt-semantic-layer 301! +/docs/building-a-new-adapter /docs/contributing/adapter-development/3-building-a-new-adapter 301! +/reference/warehouse-profiles/impala-profile /reference/warehouse-setups/impala-setup 302 +/reference/warehouse-profiles/exasol-profile /reference/warehouse-setups/exasol-setup 302 +/reference/warehouse-profiles/layer-profile /reference/warehouse-setups/layer-setup 302 +/reference/warehouse-profiles/postgres-profile /reference/warehouse-setups/postgres-setup 302 +/reference/warehouse-profiles/greenplum-profile /reference/warehouse-setups/greenplum-setup 302 +/reference/warehouse-profiles/alloydb-profile /reference/warehouse-setups/alloydb-setup 302 +/reference/warehouse-profiles/azuresynapse-profile /reference/warehouse-setups/azuresynapse-setup 302 +/reference/warehouse-profiles/snowflake-profile /reference/warehouse-setups/snowflake-setup 302 +/reference/warehouse-profiles/rockset-profile /reference/warehouse-setups/rockset-setup 302 +/reference/warehouse-profiles/trino-profile /reference/warehouse-setups/trino-setup 302 +/reference/warehouse-profiles/glue-profile /reference/warehouse-setups/glue-setup 302 +/reference/warehouse-profiles/duckdb-profile /reference/warehouse-setups/duckdb-setup 302 +/reference/warehouse-profiles/vertica-profile /reference/warehouse-setups/vertica-setup 302 +/reference/warehouse-profiles/clickhouse-profile /reference/warehouse-setups/clickhouse-setup 302 +/reference/warehouse-profiles/athena-profile /reference/warehouse-setups/athena-setup 302 +/reference/warehouse-profiles/iomete-profile /reference/warehouse-setups/iomete-setup 302 +/reference/warehouse-profiles/mssql-profile /reference/warehouse-setups/mssql-setup 302 +/reference/warehouse-profiles/tidb-profile /reference/warehouse-setups/tidb-setup 302 +/reference/warehouse-profiles/materialize-profile /reference/warehouse-setups/materialize-setup 302 +/reference/warehouse-profiles/redshift-profile
/reference/warehouse-setups/redshift-setup 302 +/reference/warehouse-profiles/databricks-profile /reference/warehouse-setups/databricks-setup 302 +/reference/warehouse-profiles/bigquery-profile /reference/warehouse-setups/bigquery-setup 302 +/reference/warehouse-profiles/dremio-profile /reference/warehouse-setups/dremio-setup 302 +/reference/warehouse-profiles/oracle-profile /reference/warehouse-setups/oracle-setup 302 +/reference/warehouse-profiles/teradata-profile /reference/warehouse-setups/teradata-setup 302 +/reference/warehouse-profiles/singlestore-profile /reference/warehouse-setups/singlestore-setup 302 +/reference/warehouse-profiles/sqlite-profile /reference/warehouse-setups/sqlite-setup 302 +/reference/warehouse-profiles/spark-profile /reference/warehouse-setups/spark-setup 302 +/reference/warehouse-profiles/mindsdb-profile /reference/warehouse-setups/mindsdb-setup 302 +/reference/warehouse-profiles/ibmdb2-profile /reference/warehouse-setups/ibmdb2-setup 302 +/reference/warehouse-profiles/firebolt-profile /reference/warehouse-setups/firebolt-setup 302 +/reference/warehouse-profiles/mysql-profile /reference/warehouse-setups/mysql-setup 302 +/reference/warehouse-profiles/hive-profile /reference/warehouse-setups/hive-setup 302 +/reference/using-sources /docs/build/sources 302 + +# getting started guide +/guides/getting-started /docs/get-started/getting-started/overview 301 +/guides/getting-started/building-your-first-project /docs/get-started/getting-started/building-your-first-project/build-your-first-models 301 +/guides/getting-started/building-your-first-project/build-your-first-models /docs/get-started/getting-started/building-your-first-project/build-your-first-models 301 +/guides/getting-started/building-your-first-project/schedule-a-job /docs/get-started/getting-started/building-your-first-project/schedule-a-job 301 +/guides/getting-started/building-your-first-project/test-and-document-your-project /docs/get-started/getting-started/building-your-first-project/test-and-document-your-project 301 +/guides/getting-started/create-a-project /docs/get-started/getting-started/create-a-project 301 +/guides/getting-started/getting-set-up /docs/get-started/getting-started/set-up-dbt-cloud 301 +/guides/getting-started/getting-set-up/setting-up-bigquery /docs/get-started/getting-started/getting-set-up/setting-up-bigquery 301 +/guides/getting-started/getting-set-up/setting-up-databricks /docs/get-started/getting-started/getting-set-up/setting-up-databricks 301 +/guides/getting-started/getting-set-up/setting-up-redshift /docs/get-started/getting-started/getting-set-up/setting-up-redshift 301 +/guides/getting-started/getting-set-up/setting-up-snowflake /docs/get-started/getting-started/getting-set-up/setting-up-snowflake 301 +/guides/getting-started/getting-started /docs/get-started/getting-started/set-up-dbt-cloud 301 +/guides/getting-started/learning-more /docs/get-started/getting-started-dbt-core 301 +/guides/getting-started/learning-more/getting-started-dbt-core /docs/get-started/getting-started-dbt-core 301 +/guides/getting-started/learning-more/refactoring-legacy-sql /docs/get-started/learning-more/refactoring-legacy-sql 301 +/guides/getting-started/learning-more/using-jinja /docs/get-started/learning-more/using-jinja 301 + +# ide ia redirects +/docs/dbt-cloud/cloud-ide/the-dbt-ide /docs/getting-started/dbt-cloud-features 301! +/docs/dbt-cloud/cloud-ide/handling-merge-conflicts /docs/collaborate/git/resolve-merge-conflicts 301! 
+/dbt-cloud/cloud-ide/viewing-docs-in-the-ide /docs/getting-started/develop-in-the-cloud 301! +/docs/dbt-cloud/cloud-ide/ide-beta /docs/getting-started/develop-in-the-cloud 301! +/docs/running-a-dbt-project/using-the-dbt-ide /docs/getting-started/develop-in-the-cloud 301! +/dbt-cloud/cloud-ide/the-ide-git-button /docs/collaborate/git/version-control-basics 301! + +# Community docs +/docs/contributing/long-lived-discussions-guidelines /community/resources/forum-guidelines 301 +/docs/guides/legacy/navigating-the-docs.md /community/contribute 301 +/community/writing-on-discourse/ /community/contributing/contributing-online-community 301 +/community/contributing/ /community/contribute 301 +/docs/contributing/contributor-license-agreements /community/resources/contributor-license-agreements 301 +/community/maintaining-a-channel /community/resources/maintaining-a-channel 301 +/docs/contributing/oss-expectations /community/resources/oss-expectations 301 +/docs/contributing/slack-rules-of-the-road /community/resources/slack-rules-of-the-road 301 + +/blog/getting-started-with-the-dbt-semantic-layer /blog/understanding-the-components-of-the-dbt-semantic-layer 301! +/docs/getting-started/develop-in-the-cloud#creating-a-development-environment /docs/get-started/develop-in-the-cloud#set-up-and-access-the-cloud-ide 301 diff --git a/contributing/adding-page-components.md b/contributing/adding-page-components.md new file mode 100644 index 00000000000..5cbe8a6d471 --- /dev/null +++ b/contributing/adding-page-components.md @@ -0,0 +1,90 @@ +## Using warehouse components + +You can use the following components to provide code snippets for each supported warehouse. You can see a real-life example in the docs page, "[Initialize your database](/docs/get-started/getting-started/getting-set-up/setting-up-databricks#initialize-your-dbt-project)." + +Identify code by labeling with the warehouse names: + +```js + + +
<WHCode>

<div warehouse="BigQuery">

```sql
select * from `dbt-tutorial.jaffle_shop.customers`
```

</div>

<div warehouse="Databricks">

```sql
select * from default.jaffle_shop_customers
```

</div>
</WHCode>
```

## Using tabs for multiple resources

You can use the following components to provide code snippets in a tabbed view. You can see a real-life example in the docs page, "[Building models](https://docs.getdbt.com/docs/building-a-dbt-project/building-models#building-dependencies-between-models)."

Identify code and code files by labeling with the component they are describing:

```code
<Tabs
  defaultValue="models"
  values={[
    { label: 'Models', value: 'models', },
    { label: 'Sources', value: 'sources', },
  ]
}>

<TabItem value="models">

<File name='models/<modelname>.sql'>

```sql

{{ config(

) }}

select ...


```

</File>

<File name='dbt_project.yml'>

```yml
models:
  [<resource-path>](resource-path):


```

</File>

</TabItem>

<TabItem value="sources">

<File name='dbt_project.yml'>

```yml
sources:
  [<resource-path>](resource-path):


```

</File>

</TabItem>

</Tabs>
```
\ No newline at end of file
diff --git a/contributing/content-style-guide.md b/contributing/content-style-guide.md
new file mode 100644
index 00000000000..d59eb98e118
--- /dev/null
+++ b/contributing/content-style-guide.md
@@ -0,0 +1,417 @@

# Content style guide

Welcome to the content style guide for docs.getdbt.com! We aim to provide docs that help readers accomplish their goals. To this end, we should focus on clarity and meaning in our sentences, and follow up with consistency and grammatical correctness when we can.

This guide includes standards we want to emphasize, likely because we've made deliberate decisions about them. You can refer to [_The Microsoft Writing Style Guide_](https://docs.microsoft.com/en-us/style-guide/welcome/) and the [_Chicago Manual of Style_](https://www.chicagomanualofstyle.org/home.html) for those nagging questions like, "[Should I use an Em dash, En dash or hyphen?](https://docs.microsoft.com/en-us/style-guide/punctuation/dashes-hyphens/)"

### Table of Contents
* [Callouts](#callouts)
* [Text formatting](#text-formatting)
* [UI elements](#ui-elements)
* [Titles](#titles)
* [Placeholder text](#placeholder-text)
* [Oxford comma](#oxford-comma)
* [Lists](#lists)
* [Tables](#tables)
* [Word choice & terminology](#word-choice--terminology)
* [Links](#links)
* [Images](#images)
* [Talk to us](#talk-to-us)

## Callouts

Callouts highlight important or high-value information that readers need to know. We want callouts to stand out, so we should keep their content to a minimum, avoiding general information, permissions, or prerequisites. Too much information can make it difficult to absorb. Imagine driving down one block with five stop signs!?!
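For example, a callout might look like the following — a minimal sketch, assuming the site's Docusaurus-style `:::` admonition syntax; the callout wording is hypothetical:

```md
<!-- hypothetical callout text -->
:::caution Heads up
Deleting a project can't be undone. Export anything you need before you delete.
:::
```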
## Text formatting
You consider many elements when designing technical docs, and the way you format text can help you achieve a well-articulated design. Used consistently, well-formatted text creates a single voice when there are multiple authors/contributors, increases the readability of the content, and further establishes an organization's branding.

### Italics
Use italics to decorate text for emphasis.

:white_check_mark: Do _not_ leave any personal belongings on the bus.

### Bold
Don't use bold for emphasis. It's reserved for titles, headers, and UI elements.

:white_check_mark: Click **Pay** to complete your purchase.

:x: **DO NOT** lock the door.

### Code
Use code font to decorate text for:

|Text |Example |
|-----|--------|
| source code (like SQL, YAML, and JavaScript) | `select * from customers` |
| [placeholder text](#placeholder-text) | `CUSTOMER_ID` |
| directory paths | `/opt/homebrew/bin/` |
| directory names | The file is in the `etc` directory. |
| filenames | Update your `dbt_project.yml` to configure this |
| git branch names | When done, merge your PR to `main` |
| commands | To check the status of a running cluster, use the `ghe-cluster-status` command |
| arguments, parameters, keys | Update the `name` in your YAML file |

Use [code blocks](#code-blocks) for longer or more complex commands. Do _not_ apply multiple font decorations to the same text; it makes the text visually busy and harder to read. For example, avoid inline links in command names.

### Code blocks

Keep lines in code samples to about 60 characters to avoid requiring readers to scroll horizontally in the code block. Locate explanatory text before the code block, rather than using comments inside the code block.

Within code blocks:

* Avoid using markup before the command output.
* Only use `$` before the command itself if you're showing the command's output in the same block.

#### Code block examples

Provide context for code examples that refer to a larger file by showing the relevant section of the file, so that users understand how to edit their own code.

:white_check_mark: Use:

```yaml
name: my_dbt_project
version: 1.0.0

config-version: 2

vars:
  # The `start_date` variable will be accessible in all resources
  start_date: '2021-06-01'
```

:x: Avoid:
```yaml
config-version: 2

vars:
  # The `start_date` variable will be accessible in all resources
  start_date: '2021-06-01'
```

## UI elements

Similar to [text formatting](#text-formatting), referring to user interface (UI) elements consistently can help increase the scannability and readability of the docs for our readers.

The following sections describe the guidelines for documenting UI elements.

### Buttons

Bold the button name and use the term click.

:white_check_mark: Click **Submit** to create a new branch.

:x: Click the **Submit** button to create a new branch.

### Checkboxes

Bold the name of the checkbox. You can use the terms select, choose, or clear.

:white_check_mark: Select the **New** option.

:white_check_mark: Clear the **New** option.

:x: Check the **New** option.

:x: Uncheck the **New** option.

### Dropdown menus

Bold the name of the dropdown menu and also the names of its list items. You can use the terms select or choose.

:white_check_mark: In the **Create** menu, select the **From a template** option.

:white_check_mark: Choose **Create** > **From a template** to create a new page.

### Radio buttons

Bold the name of the radio button. You can use the terms select or choose.

:white_check_mark: Choose the **Small size** option.

:x: Click the **Small size** radio button.

### Text fields

Bold the name of the text field and use the term enter for user input.

:white_check_mark: In the **Address** field, enter your company's address.

### Location of UI elements

When referring to a UI element, describe its position in the software application to help users locate it easily. You can use upper, lower, center, left, and right to do this.

:white_check_mark: Use the search box in the upper left corner to explore more topics.

:white_check_mark: You can view alerts in the lower right corner of the tool.

:white_check_mark: You can manage your projects in the file explorer on the left side of the page.

:white_check_mark: Access all guides from the Guides menu at the top of the page.
### UI text

When referring to different sections of the IDE, use the name of the section and bold it. Avoid using the terms panel and pane.

:white_check_mark: In the **Settings** section, choose the default address for your account.

:x: You can review all your past orders from the **History** pane.

## Titles

People make use of titles in many places, like table headers, section headings (such as an H2, H3, or H4), page titles, sidebars, and so much more.

When generating titles or updating them, use sentence case. It sets a more conversational tone to the docs—making the content more approachable and creating a friendly feel.

We've defined five content types you can use when contributing to the docs (as in, writing or authoring). Learn more about title guidelines for [each content type](https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/content-types.md).

## Placeholder text

Placeholder text is something that the user should replace with their own text. For example, their website domain name.

Use all capital letters ([screaming snake case](https://fission.codes/blog/screaming-snake-case/)) to indicate text that changes in the user interface or that the user needs to supply in a command or code snippet. Avoid surrounding it in brackets or braces, which someone might copy and use, producing an error.

Identify what the user should replace the placeholder text with in the paragraph preceding the code snippet or command.

:white_check_mark: The following is an example of configuring a connection to a Redshift database. In your YAML file, you must replace `CLUSTER_ID` with the ID assigned to you during setup:

```yaml
my-redshift-db:
  target: dev
  outputs:
    dev:
      type: redshift
      method: iam
      cluster_id: CLUSTER_ID

```

## Oxford comma

Use an Oxford comma (serial comma) when you have a series with three or more terms.

The sentence below could mean my parents are Maria and Lin, or it could mean they are people I love in addition to my parents.

:x: I love my parents, Maria and Lin.

Removing this ambiguity by using Oxford commas makes content easier to translate.

:white_check_mark: I love my parents, Maria, and Lin.

## Lists

People often scan technical documentation until they find the information they're looking for, instead of reading it line by line. Lists are a great way to present content in a scannable format.

There are bulleted (unordered) lists and numbered (ordered) lists. If the list items can be in any order, use a bulleted list. For a prioritized list or a set of steps, use a numbered list.

Guidelines for making lists are:
- There are at least two items.
- All list items follow a consistent grammatical structure (like each item starts with a verb, each item begins with a capitalized word, each item is a sentence fragment).
- List items don't end in commas, semicolons, or conjunctions (like "and", "or"). However, you can use periods if they're complete sentences.
- Introduce the list with a heading or, if it's within text, as a complete sentence or as a sentence fragment followed by a colon.

If the list starts getting lengthy and dense, consider presenting the same content in a different format such as a table, as separate subsections, or a new guide.

### Examples of lists

A bulleted list with introductory text:

> A dbt project is a directory of `.sql` and `.yml` files. The directory must contain at a minimum:
>
> - Models: A model is a single `.sql` file.
Each model contains a single `select` statement that either transforms raw data into a dataset that is ready for analytics or, more often, is an intermediate step in such a transformation.
> - A project file: A `dbt_project.yml` file, which configures and defines your dbt project.

A bulleted list with sentence fragments:

> Supported languages:
> - Python
> - Java
> - C++

A numbered list following an H2 heading:

> ## Check out a new git branch
>
> 1. Make sure you're in the Develop interface. If you're not, click the hamburger menu, then click `Develop`. The main branch is now set as read-only mode, so you'll need to create a new branch.
>
> 2. Click **+ create new branch** and enter `add-customers-model` for the branch name.

## Tables
Tables provide a great way to present complex information and can help the content be more scannable for users, too.

There are many ways to construct a table, like row spanning and cell splitting. Make sure the content is clear, concise, and presents well on the web page (like avoid awkward word wrapping).

Guidelines for making tables are:
- Introduce the table with a heading or, if it's within text, as a complete sentence or as a sentence fragment followed by a colon.
- Use a header row.
- Use sentence case for all content, including the header row.
- Content can be complete sentences, sentence fragments, or single words (like `Currency`).

If the table starts getting cumbersome and hard to read (that is, bad UX), consider presenting the same content in a different format such as a definition list. Consider your design of the content, too (like using flag icons instead of country names as it takes up less space on a page). Or, you could split the content into separate subsections, separate tabs, or separate pages (like by data warehouse or programming language).

### Examples of tables

A table with introductory text:

> You can use **custom schemas** in dbt to build models in a schema other than your target schema. By default, dbt generates a model's schema name by concatenating the custom schema to the target schema, as follows: `<target_schema>_<custom_schema>`.
>
> | Target schema | Custom schema | Resulting schema |
> | ------------- | ------------- | ---------------- |
> | <target_schema> | None | <target_schema> |
> | analytics | None | analytics |
> | dbt_alice | None | dbt_alice |
> | <target_schema> | <custom_schema> | <target_schema>\_<custom_schema> |
> | analytics | marketing | analytics_marketing |
> | dbt_alice | marketing | dbt_alice_marketing |

A table following an H3 heading:

> ### Arguments
> | Name | Description | Values |
> | -----| ----------- | ------ |
> | `-help` | Displays information on how to use the command. | Doesn't take any values. |
> | `-readable` | Print output in human readable format. | |
> | `-file` | Print output to file instead of stdout. | Name of the file. |

## Word choice & terminology

### Abbreviations

According to The American Heritage Dictionary, an abbreviation is "[a] shortened form of a word or phrase used chiefly in writing to represent the complete form" but, unlike an acronym, its letters aren't pronounced together as one full word.

### Acronyms

Spell out acronyms the first time they're used in an article, except in titles or headers. If an uncommon abbreviation appears in the title, define it in the first couple of lines of the body text.
_Do_ provide the full word or phrase being abbreviated before the abbreviation itself, and enclose the abbreviation in parentheses.
+ * *Example: Integrated Development Environment (IDE)* + +**DO NOT** use an acronym if its only used once. Please use the full word or phrase for its one-time use. + +### Latin abbreviations + +Avoid using Latin abbreviations. These terms are harder to localize/translate. + +Some common Latin abbreviations and other words to use instead: + +| Avoid | Use | Example | +|--------------------|------------|---------| +| i.e. | that is | Use incremental models when your dbt runs are becoming too slow (that is, don't start with incremental models) | +| e.g. | | | +| etc. | | | + + +### Prepositions + +Avoid ending a sentence with a preposition unless the rewritten sentence would sound awkward or too formal. + +### Product names + +Product names, trademarks, services, and tools should be written as proper nouns, unless otherwise specified by the company or trademark owner. + +### Terms to use or avoid + +Use industry-specific terms and research new/improved terminology. Also refer to the Inclusive Language section of this style guide for inclusive and accessible language and style. + +**DO NOT** use jargon or language familiar to a small subset of readers or assume that your readers understand ALL technical terms. + +Use | Avoid +-- | -- +type (in the user interface) | enter (in the user interface) +enter (in the command line) | type (in the command line) +email | e-mail +on dbt | on a remote server +person, human | client, customer +press (a key) | hit, tap +recommended limit | soft limit +sign in | log in, login +sign up | signup +terminal | shell +username | login + + +## Links + +Links embedded in documentation are about trust. Users trust that we will lead them to sites or pages related to their reading content. In order to maintain that trust, it is important that links are transparent, up-to-date, and lead to legitimate resources. + +### Link format + +Hyperlinks should be text only, please avoid image-based links. The text should clearly state the destination. + + :x: For more information, visit https://docs.getdbt.com + + :x: For more information, [_Click Here_](https://docs.getdbt.com/) + + ✅ For more information, visit the [_dbt Labs doc site_](https://docs.getdbt.com/). + +### Link destinations + +The link destination should be directly related to the topic of the document. There are many valid destinations, including educational material, third-party product instructions, explanations of a limitation, FAQs, and other pages on the dbt Labs site. + +Some destination types should be avoided entirely: +* Sales or promotional material. +* General landing pages - link to specific information. +* Content that is hidden behind paywalls or that requires credentials to access. +* Untrusted or otherwise questionable sites that might contain dubious information, malware, or suspicious behavior. +* Personal sites such as file sharing folders or blogs. +* Instant downloads (the user clicks the link and the download begins immediately). If you need to host a file, please contact dbt Labs for a solution. + + +## Images + +### Alt text + +In order to include people using screen readers, remember to add alternate text for images. Every image should include an alt attribute that provides a complete description of the image for the user. For more information, see “Images, image maps, and multimedia” in Microsoft’s Style Guide. + +### Icons +When describing icons that appear on-screen, use the [_Google Material Icons_](https://fonts.google.com/icons?selected=Material+Icons) reference page as a guide. 
### Icons
When describing icons that appear on-screen, use the [_Google Material Icons_](https://fonts.google.com/icons?selected=Material+Icons) reference page as a guide.

:x: Click on the hamburger menu icon

:white_check_mark: Click on the menu icon

### Image names

Use two words, either adjectives or nouns, that describe the contents of the file, separated by an underscore `_` (known as `snake_case`). The two words can also be separated by a hyphen (`kebab-case`).
* Types and examples
  * `snake_case`
    * *gitlab_setup.jpg*
  * `kebab-case`
    * *sso-setup.jpg*

### Screenshots

There are scenarios where a visual aid may be beneficial, especially when creating a document highlighting navigation. Screenshots provide a lightweight alternative to videos and are much easier to produce, edit, and insert into documents. Due to their limitations, they should be used sparingly.

It is important to remember that all of dbt Labs documentation is currently in English only. Users might require screen readers or translation programs to read content, and these will not pick up the text within screenshots.

#### How to take a screenshot

Both macOS and Windows include tools that allow you to capture and manipulate screenshots. You can use alternatives based on your preferences, but avoid programs that place a watermark over the content.

* [How to take screenshots in macOS](https://support.apple.com/en-us/HT201361#:~:text=How%20to%20take%20a%20screenshot,to%20save%20to%20your%20desktop.)
* [How to take screenshots with the Snipping Tool in Windows](https://support.microsoft.com/en-us/windows/use-snipping-tool-to-capture-screenshots-00246869-1843-655f-f220-97299b865f6b#:~:text=Press%20Ctrl%20%2B%20PrtScn%20keys.,that%20you%20want%20to%20capture.)

#### Screenshot guidelines

Once you have determined that a screenshot will add value to the document where words alone can't, refer to these guidelines for capturing the information:

* Use screenshots to highlight navigation, on-screen elements, and other noteworthy product visuals.
* Avoid using screenshots to demonstrate inputs and outputs. All code snippets and sample results should be in the documents as text fields.
* Use concise filenames that are relevant to the content contained within. Enumerate them if they are part of a sequence.

  :x: screenshot-august0822.jpg

  :white_check_mark: viewing-admins-01.jpg

* Redact all sensitive information from the screenshot — names, email addresses, phone numbers, or anything that could be considered personally identifiable information (PII) should be obfuscated.
* Avoid URL and bookmark bars in the screenshot, and limit the scope to the product and page being documented.

## Talk to us
Your feedback helps drive us forward. At dbt Labs, we want you to get involved if you see areas in the documentation that need improvement. That might include becoming a docs contributor or simply filing a GitHub issue so we know where to look. We have an incredible community of contributors, and our documents reflect that.

dbt Labs is growing, and a team of technical writers is now handling reviews and requests. This style guide will continue to grow as we identify more ways to make the documents easily accessible to all users.
diff --git a/contributing/content-types.md b/contributing/content-types.md
index f1094e29b2c..4654ada9255 100644
--- a/contributing/content-types.md
+++ b/contributing/content-types.md
@@ -102,7 +102,7 @@ Procedural content should include troubleshooting tips as frequently as possible

## Guide

-Guides (formerly called long-form procedural articles) are highly-approachable articles that group information in context to help readers complete a complex task or set of related tasks. Guides eliminate duplication and ensure the customer finds contextual content in the right place. Guides may be a set of tasks within the reader's larger workflow, such as including use cases.
+Guides are highly-approachable articles that group information in context to help readers complete a complex task or set of related tasks. Guides eliminate duplication and ensure people find contextual content in the right place. Guides may be a set of tasks within the reader's larger workflow, such as including use cases.

Guides combine the content types within a single article to illustrate an entire workflow within a single page, rather than splitting the workflow out into separate pieces. Guides containing multiple procedures help us scale as more options are added to the product. Users may need to complete different procedures within the guide at different times, or refer back to the guide for conceptual content or to complete a followup task. Example usage: If there is a large number of the same type of setting, use a guide that gathers all of the tasks in context.

diff --git a/contributing/contributor-code-of-conduct.md b/contributing/contributor-code-of-conduct.md
new file mode 100644
index 00000000000..4e2f1a3be32
--- /dev/null
+++ b/contributing/contributor-code-of-conduct.md
@@ -0,0 +1,39 @@

## Documentation contributor code of conduct

dbt Labs is privileged to have a passionate community of supporters from all over the world. The contributors to the community represent the very best of what we can accomplish when people work collaboratively. The code of conduct is the cornerstone of informative, accessible, and inclusive documentation.

### Be inclusive

It is critical that all of dbt Labs documentation is inclusive and respectful of the audience, which consists of people in widely varying circumstances from all over the planet. When writing documentation, use words that are inclusive, anti-racist, and accessible.

Individual words might be small, but together they can create community, belonging, and equity. Be empathetic in all word and style choices. Be accurate when referring to people and communities.

Here are resources to help you think about inclusive and accessible language and style:
* [18F Content Guide on Inclusive Language](https://content-guide.18f.gov/inclusive-language/)
* [Plain language guidelines (US Gov)](https://www.plainlanguage.gov/guidelines/)
* MailChimp Content Style Guide:
  * [Writing About People](https://styleguide.mailchimp.com/writing-about-people/)
  * [Writing for Accessibility](https://styleguide.mailchimp.com/writing-for-accessibility/)
* [Readability Guidelines (UK)](https://readabilityguidelines.co.uk/)
* [Conscious Style Guide](https://consciousstyleguide.com/)

### Be respectful

dbt Labs greatly admires the people who use dbt, partner with the organization, and implement our open source code. We strive for the tone of our documentation to reflect this and never make the reader feel patronized.
+ +Please don't use a disparaging tone when writing about another organization’s product or services. Remain neutral (or positive) and remove any personal feelings you might have from the content. + +Through mutual respect for one another, we can enlighten the world. + +### Be mindful + +Users represent a wide variety of positions in organizations across the world. The goal is to make our documents as appealing and easy to understand for as many people as possible. The dbt Labs style guide is a framework, but the experience you bring to the docs is what will give the most value to our readers. + +dbt Labs currently publishes content only in English, but there are many users who are not native speakers. Keep the wording clear and professional. Avoid idioms, jokes, and anything else that might muddle the point. + +Write in straightforward language that requires minimal effort to parse or translate. This helps us reach a wider audience. + +### Be collaborative + +Community members and dbt Labs staff are in this together. We are a passionate group working towards a common goal — making dbt accessible to everyone. Just as we value community feedback, we are prepared to give feedback to all contributors, fostering an environment of friendly and respectful conversation. Together we can accomplish more than we ever could as individuals. You are an integral part of dbt Labs, and your enthusiasm for collaboration is essential to dbt Labs' success. diff --git a/contributing/developer-blog.md b/contributing/developer-blog.md new file mode 100644 index 00000000000..1ad3c271ed3 --- /dev/null +++ b/contributing/developer-blog.md @@ -0,0 +1,65 @@ + +* [Contributing](#contributing) +* [Core Principles](#core-principles) + +## Contributing + +The dbt Developer Blog is a place where analytics practitioners can go to share their knowledge with the community. Analytics Engineering is a discipline we’re all building together. The developer blog exists to cultivate the collective knowledge that exists on how to build and scale effective data teams. + +### What makes a good developer blog post? + +- The short answer: Practical, hands-on analytics engineering tutorials and stories + - [Slim CI/CD with Bitbucket](https://docs.getdbt.com/blog/slim-ci-cd-with-bitbucket-pipelines) + - [So You Want to Build a dbt Package](https://docs.getdbt.com/blog/so-you-want-to-build-a-package) + - [Founding an Analytics Engineering Team](https://docs.getdbt.com/blog/founding-an-analytics-engineering-team-smartsheet) +- See the [Developer Blog Core Principles](#core-principles) + +### How do I submit a proposed post? + +To submit a proposed post, open a `Contribute to the dbt Developer Blog` issue on the [Developer Hub repo](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose). You will be asked for: + +- A short (one paragraph) summary of the post you’d like to publish +- An outline of the post + +You’ll hear back from a member of the dbt Labs team within 7 days with one of three responses: + +- The post looks good to go as is! We’ll ask you to start creating a draft based on the initial outline you submitted +- Proposed changes to the outline. This could be additional focus on a topic you mention that’s of high community interest or a tweak to the structure to help with narrative flow +- Not a fit for the developer blog right now. We hugely appreciate *any* interest in submitting to the Developer Blog - right now our biggest bottleneck is the capacity to help folks get these published.
See below on how we are thinking about and evaluating potential posts. + +### What is the process once my blog is accepted? + +Once a blog is accepted, we’ll ask you for a date by which we can expect the draft. Typically we’ll ask that you commit to having it ready within a month of submitting the issue. + +Once you submit a draft, we’ll return a first set of edits within 5 business days. + +The typical turnaround time from issue creation to going live on the developer blog is ~4 to 6 weeks. + +### What happens after my blog is published? + +We’ll share the blog on the dbt Labs social media channels! We also encourage you to share on the dbt Slack in #i-made-this. + +### What if my post doesn’t get approved? + +We want to publish as many community contributors as possible, but not every post will be a fit for the Developer Blog. That’s ok! There are many different reasons why we might not be able to publish a post right now and none of them reflect on the quality of the proposed post. + +- **dbt Labs capacity**: We’re committed to providing hands-on feedback and coaching throughout the process. Our goal is not just to generate great developer blogs - it’s to help build a community of great writers / practitioners who can share their knowledge with the community for years to come. This necessarily means we will be able to take on a lower absolute number of posts in the short term, but will hopefully be helpful for the community long term. +- **Focus on narrative / problem solving - not industry trends**: The developer blog exists, primarily, to tell the stories of analytics engineering practitioners and how they solve problems. The idea is that reading the developer blog gives a feel for what it is like to be a data practitioner on the ground today. This is not a hard and fast rule, but a good way to approach this is “How I/we solved X problem” rather than “How everyone should solve X problem”. + +We are very interested in stacks, new tools and integrations and will happily publish posts about this - with the caveat that the *focus* of the post should be solving real-world problems. If you are writing about these, it should ideally be something that you have used yourself in a hands-on, production implementation. + +- **Right-sized scope**: We want to be able to cover a topic in-depth and dig into the nuances. Big topics like “How should you structure your data team” or “How to ensure data quality in your organization” will be tough to cover in the scope of a single post. If you have a big idea - try subdividing it! “How should you structure your data team” could become “How we successfully partnered with our RevOps team on improving lead tracking” and “How to ensure data quality in your organization” might be “How we cleaned up our UTM tracking”. + +### What if I need help / have questions? + +- Feel free to post any questions in #community-writers on the dbt Slack. + +## Core Principles + +- 🧑🏻‍🤝‍🧑🏾 **The dbt Developer blog is written by humans** - individual analytics professionals sharing their insight with the world. To the extent feasible, a community member posting on the developer blog is not staking an official organizational stance, but something that *they* have learned or believe based on their work. This is true for dbt Labs employees as well. +- 💍 Developer blog content is knowledge-rich - these are posts that readers share, bookmark and come back to time and time again.
+- ⛹🏼‍♂️ Developer blog content is written by and for *practitioners* - end users of analytics tools (and sometimes people that work with practitioners). +- ⭐ Developer blog content is best when it is *the story which the author is uniquely positioned to tell.* Authors are encouraged to consider what insight they have that is specific to them and the work they have done. +- 🏎️ Developer blog content is actionable - readers walk away with a clear sense of how they can use this information to be a more effective practitioner. Posts include code snippets, Loom walkthroughs and hands-on, practical information that can be integrated into daily workflows. +- 🤏 Nothing is too small to share - what you think is simple has the potential to change someone's week. +- 🔮 Developer blog content is present-focused — posts tell a story of a thing that you've already done or are actively doing, not something that you may do in the future. diff --git a/contributing/versioningdocs.md b/contributing/single-sourcing-content.md similarity index 94% rename from contributing/versioningdocs.md rename to contributing/single-sourcing-content.md index 6096c8e2e83..1c47b87b977 100644 --- a/contributing/versioningdocs.md +++ b/contributing/single-sourcing-content.md @@ -1,17 +1,19 @@ -# Managing Available Versions +# Single-sourcing content -* [Adding a new version](#adding-a-new-version) -* [Using end-of-life dates](#using-end-of-life-dates) -* [Versioning entire pages](#versioning-entire-pages) -* [Versioning blocks of content](#versioning-blocks-of-content) +* [About versioning](#about-versioning) + * [Using end-of-life dates](#using-end-of-life-dates) + * [Versioning entire pages](#versioning-entire-pages) + * [Versioning blocks of content](#versioning-blocks-of-content) * [Using global variables](#using-global-variables) * [Reusing snippets of content](#reusing-snippets-of-content) + +## About versioning + Versions are managed in the `versions` array located in the `website/dbt-versions.js` file. This array holds all versions which are selectable from the versions dropdown in the navigation. **The first version in the array is the latest version.** This is the default version when a visitor loads the page. -## Adding a new version +### Adding a new version To add a new version to the site, a new object must be added to the `versions` array in the same format as existing versions. This object holds two properties: **version** and **EOLDate (See End of Life Dates below)**. @@ -28,7 +30,7 @@ exports.versions = [ The **version** property is the value which shows in the nav dropdown. This value is compared to the VersionBlock component on a docs page to determine whether that section should be visible for the current active version (See the **Versioning the Sidebar** section on using the VersionBlock component). -## Using end-of-life dates +### Using end-of-life dates The **EOLDate** property determines when a version is no longer supported. A version is supported up until 1 year after its release. @@ -39,11 +41,11 @@ Two different versions of the banner will show depending on the end-of-life date - When the version is within 3 months of the **EOLDate.** - When the version has passed the **EOLDate.** -### Updating EOL date banner language +#### Updating EOL date banner language The content for these two EOLDate banners are located in the `website/src/theme/DocPage/index.js` file, in the `EOLBannerText` property.
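For reference, here is a hedged sketch of what a versioned block can look like on a docs page. The `firstVersion` and `lastVersion` props are assumptions inferred from how the **version** property is described above; check the component's implementation under `website/src/components` before relying on them.

```jsx
<VersionBlock firstVersion="0.21" lastVersion="1.0">

This content only renders when the version selected in the
navigation dropdown falls between 0.21 and 1.0.

</VersionBlock>
```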
-## Versioning entire pages +### Versioning entire pages If a Docs page should not be available for the selected version, it is possible to version the entire page. This is managed in the `versionedPages` array within the `website/dbt-versions.js` file. @@ -58,7 +60,7 @@ Example of how to version a page in the `dbt-versions.js` file: ```jsx exports.versionedPages = [ { - "page": "docs/available-adapters", + "page": "docs/supported-data-platforms", "firstVersion": "0.21", } ] diff --git a/netlify.toml b/netlify.toml new file mode 100644 index 00000000000..6ab92757410 --- /dev/null +++ b/netlify.toml @@ -0,0 +1,2 @@ +[build] + functions = "functions" diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000000..058db9205e2 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,6 @@ +{ + "name": "docs.getdbt.com", + "lockfileVersion": 2, + "requires": true, + "packages": {} +} diff --git a/website/.gitignore b/website/.gitignore index da453025ae3..e0c5b212ea1 100644 --- a/website/.gitignore +++ b/website/.gitignore @@ -8,6 +8,10 @@ .docusaurus .cache-loader +# cypress artifacts +/cypress/videos +/cypress/screenshots + # misc .DS_Store .env diff --git a/website/blog/2019-05-01-how-we-structure-dbt-projects.md b/website/blog/2019-05-01-how-we-structure-dbt-projects.md index 8f38ec3de33..8e1d4f59e32 100644 --- a/website/blog/2019-05-01-how-we-structure-dbt-projects.md +++ b/website/blog/2019-05-01-how-we-structure-dbt-projects.md @@ -24,11 +24,11 @@ It’s important to note that **this is not the only, or the objectively best, w * our views on data model design; which in turn are influenced by: * the kinds of analytics problems we are solving for clients -* the data stack we typically work within, in which multiple data sources are loaded by third party tools, and the data warehouse is optimized for analytical queries (therefore we aren’t tightly bounded by performance optimization considerations). +* the data stack we typically work within, in which multiple data sources are loaded by third party tools, and the data warehouse is optimized for analytical queries (therefore we aren’t tightly bounded by performance optimization considerations). Our opinions are **almost guaranteed to change over time** as we update our views on modeling, are exposed to more analytics problems, and data stacks evolve. It’s also worth clearly stating here: the way we structure dbt projects makes sense for our projects, but may not be the best fit for yours! This article exists on Discourse so that we can have a conversation – I would love to know how others in the community are structuring their projects. -In comparison, the (recently updated) [best practices](/docs/guides/best-practices) reflect principles that we believe to be true for any dbt project. Of course, these two documents go hand in hand – our projects are structured in such a way that makes the those principles easy to observe, in particular: +In comparison, the (recently updated) [best practices](/guides/best-practices) reflect principles that we believe to be true for any dbt project.
Of course, these two documents go hand in hand – our projects are structured in such a way that makes those principles easy to observe, in particular: * Limit references to raw data * Rename and recast fields once @@ -37,8 +37,6 @@ In comparison, the (recently updated) [best practices](/docs/guides/best-practic * Consider the information architecture of your data warehouse * Separate source-centric and business-centric transformations -If you want to see what the code for one of our projects looks like, check out [this demonstration dbt project](https://github.com/dbt-labs/dbt-learn-demo/tree/day2-dbt-training/models). - We also recently held (and recorded) an office hours on this topic – this article provides a high level outline, but there’s a lot more detail and discussion in the [video](https://youtu.be/xzKLh342s08). Lastly, before I dive in, a huge thank you to Jeremy Cohen for not only teaching me a lot of the material in this article, but also for doing a lot of the groundwork that went into this article – entire sections of this article are in fact lifted from his work. @@ -69,7 +67,7 @@ In our dbt projects, this leads us to our first split in our `models/` directory └── models ├── marts └── staging - + ``` ## Staging raw data @@ -106,7 +104,7 @@ Each staging directory contains at a minimum: * A `src_<source>.yml` file which contains: * [Source](/docs/building-a-dbt-project/using-sources) definitions, tests, and documentation * A `stg_<source>.yml` file which contains - [Tests](/docs/building-a-dbt-project/tests) and [documentation](/docs/building-a-dbt-project/documentation) for models in the same directory + [Tests](/docs/build/tests) and [documentation](/docs/building-a-dbt-project/documentation) for models in the same directory ``` ├── dbt_project.yml └── models ├── marts └── staging └── braintree ├── src_braintree.yml ├── stg_braintree.yml ├── stg_braintree__customers.sql └── stg_braintree__payments.sql ``` - + Some dbt users prefer to have one `.yml` file per model (e.g. `stg_braintree__customers.yml`). This is a completely reasonable choice, and we recommend implementing it if your `.yml` files start to become unwieldy. @@ -127,29 +125,29 @@ Some dbt users prefer to have one `.yml` file per model (e.g. `stg_braintree__cu Earlier versions of the dbt documentation recommended implementing “base models” as the first layer of transformation – and we used to organize and name our models in this way, for example `models/braintree/base/base_payments.sql`. -We realized that while the reasons behind this convention were valid, the naming was an opinion, so in our recent update to the [best practices](/docs/guides/best-practices), we took the mention of base models out. Instead, we replaced it with the principles of “renaming and recasting once” and “limiting the dependencies on raw data”. +We realized that while the reasons behind this convention were valid, the naming was an opinion, so in our recent update to the [best practices](/guides/best-practices), we took the mention of base models out. Instead, we replaced it with the principles of “renaming and recasting once” and “limiting the dependencies on raw data”. That being said, in our dbt projects every source flows through exactly one model of the following form: ``` with source as ( - + select * from {{ source('braintree', 'payments') }} - + ), - + renamed as ( - + select id as payment_id, order_id, convert_timezone('America/New_York', 'UTC', createdat) as created_at, ...
- + from source - + ) - + select * from renamed ``` @@ -175,7 +173,7 @@ In our dbt projects, we place these base models in a nested `base` subdirectory. ├── stg_braintree.yml ├── stg_braintree__customers.sql └── stg_braintree__payments.sql -``` +``` In our projects, base models: diff --git a/website/blog/2020-07-01-how-to-create-near-real-time-models-with-just-dbt-sql.md b/website/blog/2020-07-01-how-to-create-near-real-time-models-with-just-dbt-sql.md index 35b24c74f88..944d6fdd3f9 100644 --- a/website/blog/2020-07-01-how-to-create-near-real-time-models-with-just-dbt-sql.md +++ b/website/blog/2020-07-01-how-to-create-near-real-time-models-with-just-dbt-sql.md @@ -13,7 +13,6 @@ date: 2020-07-01 is_featured: false --- - Before I dive into how to create this, I have to say this. **You probably don’t need this**. I, along with my other Fishtown colleagues, have spent countless hours working with clients that ask for near-real-time streaming data. However, when we start digging into the project, it is often realized that the use case is not there. There are a variety of reasons why near real-time streaming is not a good fit. Two key ones are: 1. The source data isn’t updating frequently enough. @@ -28,7 +27,7 @@ So when presented with a near-real-time modeling request, I (and you as well!) h Recently I was working on a JetBlue project and was presented with a legitimate use case: operational data. JetBlue’s Crewmembers need to make real-time decisions on when to close the door to a flight or rebook a flight. If you have ever been to an airport when there is a flight delay, you know how high the tension is in the room for airline employees to make the right decisions. They literally cannot do their jobs without real-time data. -If possible, the best thing to do is to query data as close to the source as possible. You don’t want to hit your production database unless you want to frighten and likely anger your DBA. Instead, the preferred approach is to replicate the source data to your analytics warehouse, which would provide a suitable environment for analytic queries. In JetBlue’s case, the data arrives in JSON blobs, which then need to be unnested, transformed, and joined before the data becomes useful for analysis. There was no way to just query from the source to get the information people required. +If possible, the best thing to do is to query data as close to the source as possible. You don’t want to hit your production database unless you want to frighten and likely anger your DBA. Instead, the preferred approach is to replicate the source data to your analytics warehouse, which would provide a suitable environment for analytic queries. In JetBlue’s case, the data arrives in JSON blobs, which then need to be unnested, transformed, and joined before the data becomes useful for analysis. There was no way to just query from the source to get the information people required. Tldr: If you need transformed, operational data to make in-the-moment decisions then you probably need real-time data. @@ -146,7 +145,7 @@ As you start to create more lambda views, you will want to make the filter into ``` -\*\* Note for the macro above, the timestamp is `var(lambda_timestamp, run_started_at)`. We want to default to the last time the historical models were run but allow for flexibility depending on the situation.
It would be useful to note that we used [run\_started\_at timestamp](https://docs.getdbt.com/reference/dbt-jinja-functions/run_started_at/) rather than `current_timestamp()` to avoid any situations where there is a job failure and the historical table hasn’t been updated for the last 5 hours. +\*\* Note for the macro above, the timestamp is `var(lambda_timestamp, run_started_at)`. We want to default to the last time the historical models were run but allow for flexibility depending on the situation. It would be useful to note that we used [run\_started\_at timestamp](/reference/dbt-jinja-functions/run_started_at/) rather than `current_timestamp()` to avoid any situations where there is a job failure and the historical table hasn’t been updated for the last 5 hours. ### Write idempotent models diff --git a/website/blog/2021-02-05-dbt-project-checklist.md b/website/blog/2021-02-05-dbt-project-checklist.md index 4e07b57631d..6100e7ff089 100644 --- a/website/blog/2021-02-05-dbt-project-checklist.md +++ b/website/blog/2021-02-05-dbt-project-checklist.md @@ -2,7 +2,7 @@ title: "Your Essential dbt Project Checklist" description: "A checklist created to guide our internal work, which you can use to clean up your own dbt project." slug: essential-dbt-project-checklist -canonical_url: https://discourse.getdbt.com/t/your-essential-dbt-project-checklist/1377 +canonical_url: https://discourse.getdbt.com/t/your-essential-dbt-project-checklist/1377 authors: [amy_chen, dave_connors] @@ -15,7 +15,7 @@ is_featured: true If you’ve been using dbt for over a year, your project is out-of-date. This is natural. -New functionalities have been released. Warehouses change. Best practices are updated. Over the last year, I and others on the Fishtown Analytics (now dbt Labs!) team have conducted seven audits for clients who have been using dbt for a minimum of 2 months. +New functionalities have been released. Warehouses change. Best practices are updated. Over the last year, I and others on the Fishtown Analytics (now dbt Labs!) team have conducted seven audits for clients who have been using dbt for a minimum of 2 months. @@ -87,7 +87,7 @@ This post is the checklist I created to guide our internal work, and I’m shari ## ✅ Project structure ------------------------------------------------------------------------------------------------------------------------------------------------------ -* If you are using dimensional modeling techniques, do you have staging and marts models? +* If you are using dimensional modeling techniques, do you have staging and marts models? * Do they use table prefixes like ‘fct\_’ and ‘dim\_’? * Is the code modular? Is it one transformation per one model? * Are you filtering as early as possible? @@ -119,7 +119,7 @@ This post is the checklist I created to guide our internal work, and I’m shari * Do you use refs and sources for everything? * Make sure nothing is querying off of raw tables, etc. ![no querying raw tables](/img/blog/checklist-8ddc2f76de24c98690ef986dcc7974bff09adb59.png) - + * Do you regularly run `dbt test` as part of your workflow and production jobs? * Do you use Jinja & Macros for repeated code? * If you do, is the balance met where it’s not being overused to the point code is not readable?
@@ -156,7 +156,7 @@ This post is the checklist I created to guide our internal work, and I’m shari **Useful links** -* [Version control](/docs/guides/best-practices/#version-control-your-dbt-project) +* [Version control](/guides/legacy/best-practices#version-control-your-dbt-project) * [dbt Labs' PR Template](/blog/analytics-pull-request-template) ## ✅ Documentation @@ -200,7 +200,7 @@ Are you using the IDE and if so, how well? **Useful links** -* [dbt Cloud as a CI tool](/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github) +* [dbt Cloud as a CI tool](/docs/deploy/cloud-ci-job) ## ✅ DAG Auditing @@ -210,41 +210,41 @@ _Note: diagrams in this section show what NOT to do!_ * Does your DAG have any common modeling pitfalls? * Are there any direct joins from sources into an intermediate model? - + * All sources should have a corresponding staging model to clean and standardize the data structure. They should not look like the image below. - + ![bad dag](/img/blog/checklist-28c75101367e272fbc2db2ebb1a1ec030517bb5e_2_517x250.jpeg) - + * Do sources join directly together? - + * All sources should have a corresponding staging model to clean and standardize the data structure. They should not look like the image below. - + ![bad dag 2](/img/blog/checklist-5d8ad45deb695eb6771003e010b242c0a3c122b9_2_517x220.jpeg) - + * Are there any rejoining of upstream concepts? - + * This may indicate: * a model may need to be expanded so all the necessary data is available downstream * a new intermediate model is necessary to join the concepts for use in both places - + ![bad dag 2](/img/blog/checklist-acd57c0e781b1eaf75a65b5063f97ac3ddc5c493_2_517x136.jpeg) - + * Are there any “bending connections”? - + * Are models in the same layer dependent on each other? * This may indicate a change in naming is necessary, or the model should reference further upstream models - + ![bad dag 3](/img/blog/checklist-0532fd13a7d63e3e5df71d025700c4d9c158a7ff_2_517x155.jpeg) - + * Are there model fan outs of intermediate/dimension/fact models? - + * This might indicate some transformations should move to the BI layer, or transformations should be moved upstream * Your dbt project needs a defined end point! - + [![bad dag 4](/img/blog/checklist-33fcd7c4922233412d1364b39227c876d0cb8215_2_517x111.jpeg) - + * Is there repeated logic found in multiple models? - + * This indicates an opportunity to move logic into upstream models or create specific intermediate models to make that logic reusable * One common place to look for this is complex join logic. For example, if you’re checking multiple fields for certain specific values in a join, these can likely be condensed into a single field in an upstream model to create a clean, simple join. diff --git a/website/blog/2021-02-09-how-to-configure-your-dbt-repository-one-or-many.md b/website/blog/2021-02-09-how-to-configure-your-dbt-repository-one-or-many.md index 14985782bc3..60dc91278d2 100644 --- a/website/blog/2021-02-09-how-to-configure-your-dbt-repository-one-or-many.md +++ b/website/blog/2021-02-09-how-to-configure-your-dbt-repository-one-or-many.md @@ -60,7 +60,7 @@ This is the most common structure we see for dbt repository configuration. 
Thoug **Strengths** * Easy to share and maintain the same core business logic -* Full dependency lineage - your dbt generated DAG encompasses all of your data transformations for your entire company +* Full dependency lineage - your dbt generated DAG encompasses all of your data transformations for your entire company **Weaknesses** @@ -148,7 +148,7 @@ This approach is nearly identical to the former (completely separate repositorie * Does not prevent conflicting business logic or duplicate macros * All models must have unique names across all packages -\*\* The project will include the information from the dbt projects but might be missing information that is pulled from your data warehouse if you are on multiple Snowflake accounts/Redshift instances. This is because dbt is only able to query the information schema from that one connection. +\*\* The project will include the information from the dbt projects but might be missing information that is pulled from your data warehouse if you are on multiple Snowflake accounts/Redshift instances. This is because dbt is only able to query the information schema from that one connection. ## So… to mono-repo or not to mono-repo? ------------------------------------------------------------------------------- diff --git a/website/blog/2021-09-11-sql-dateadd.md b/website/blog/2021-09-11-sql-dateadd.md index 0cde1df47cf..ad6ae7d7b08 100644 --- a/website/blog/2021-09-11-sql-dateadd.md +++ b/website/blog/2021-09-11-sql-dateadd.md @@ -43,7 +43,7 @@ The *functions themselves* are named slightly differently, which is common acros ```sql dateadd( {{ datepart }}, {{ interval }}, {{ from_date }} ) -``` +``` *Hour, minute and second are supported!* @@ -57,14 +57,14 @@ date_add( {{ startDate }}, {{ numDays }} ) ```sql date_add( {{ from_date }}, INTERVAL {{ interval }} {{ datepart }} ) -``` +``` *Dateparts of less than a day (hour / minute / second) are not supported.* ### The DATEADD Function in Postgres... Postgres doesn’t provide a dateadd function out of the box, so you’ve got to go it alone - but the syntax looks very similar to BigQuery’s function… - + ```sql {{ from_date }} + (interval '{{ interval }} {{ datepart }}') ``` @@ -81,7 +81,7 @@ I am sorry - that’s just a blank 2x2 matrix. I've surrendered to just searchin But couldn’t we be doing something better with those keystrokes, like typing out and then deleting a tweet? -dbt (and the [dbt_utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/#dateadd-source-macros-cross_db_utils-dateadd-sql-) macro package) helps us smooth out these wrinkles of writing SQL across data warehouses. +dbt (and the [dbt_utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/#dateadd-source-macros-cross_db_utils-dateadd-sql-) macro package) helps us smooth out these wrinkles of writing SQL across data warehouses. Instead of looking up the syntax each time you use it, you can just write it the same way each time, and the macro compiles it to run on your chosen warehouse: @@ -96,7 +96,7 @@ Adding 1 month to today would look like... ``` > *New to dbt? Check out [dbt introduction](https://docs.getdbt.com/docs/introduction) for more background on dbt and the analytics engineering workflow that it facilitates.* -> +> > *TL;DR: dbt allows data practitioners to write code like software engineers, which in this case means not repeating yourself unnecessarily.* ### Compiling away your DATEADD troubles @@ -141,7 +141,6 @@ And it’s actually quite a simple 31-line macro ([source here](https://github.c Enjoy!
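As a concrete illustration of those compiled-away troubles, here is a hedged sketch of calling the macro in a model. The argument names (`datepart`, `interval`, `from_date_or_timestamp`) follow the dbt_utils README as I recall it, and `orders` is a hypothetical model name:

```sql
-- requires dbt_utils in packages.yml; compiles to the correct
-- dateadd variant for whichever adapter you run against
select
    order_id,
    created_at,
    {{ dbt_utils.dateadd(datepart='month', interval=1, from_date_or_timestamp='created_at') }} as created_at_plus_one_month
from {{ ref('orders') }}
```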
FYI I've used dateadd macro in dbt-utils on BigQuery, Postgres, Redshift and Snowflake, but it likely works across most other warehouses. -*Note: While `dbt_utils` doesn't support Databricks by default, you can use other packages that [implement overrides](https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch#overriding-package-macros) as a workaround.* - -*This [spark_utils package](https://github.com/dbt-labs/spark-utils/blob/main/macros/dbt_utils/cross_db_utils/dateadd.sql) can help you implement the override needed to add support for Databricks dateadd* +*Note: While `dbt_utils` doesn't support Databricks by default, you can use other packages that [implement overrides](/reference/dbt-jinja-functions/dispatch#overriding-package-macros) as a workaround.* +*This [spark_utils package](https://github.com/dbt-labs/spark-utils/blob/0.3.0/macros/dbt_utils/cross_db_utils/dateadd.sql) can help you implement the override needed to add support for Databricks dateadd* diff --git a/website/blog/2021-09-15-september-21-product-email.md b/website/blog/2021-09-15-september-21-product-email.md index 14985385bc4..9d8f6b76606 100644 --- a/website/blog/2021-09-15-september-21-product-email.md +++ b/website/blog/2021-09-15-september-21-product-email.md @@ -43,7 +43,7 @@ Give Jeremy a win and check out the [blog](http://blog.getdbt.com/getting-ready - Changelog and additional assets located [here.](https://docs.getdbt.com/docs/dbt-cloud/cloud-changelog?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-8nIpohDBSr7SvpXrqY-5ONmnjdIgW0XMiAPkjQTb9Pgwt24nzqAWNX2Xgtj8LA0LrPoHpD)* - [DAG in the IDE](https://blog.getdbt.com/on-dags-hierarchies-and-ides/?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-8nIpohDBSr7SvpXrqY-5ONmnjdIgW0XMiAPkjQTb9Pgwt24nzqAWNX2Xgtj8LA0LrPoHpD): We want users to have a frictionless experience when navigating between code and context. Embedding the DAG within the IDE makes investigating project structure a lot easier ![Screen Shot 2021-09-22 at 4.59.24 PM](https://hs-8698602.f.hubspotemail.net/hub/8698602/hubfs/Screen%20Shot%202021-09-22%20at%204.59.24%20PM.png?upscale=true&width=1120&upscale=true&name=Screen%20Shot%202021-09-22%20at%204.59.24%20PM.png) -- The [Metadata API](https://docs.getdbt.com/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-8nIpohDBSr7SvpXrqY-5ONmnjdIgW0XMiAPkjQTb9Pgwt24nzqAWNX2Xgtj8LA0LrPoHpD): Now in GA! Assess data health with the metadata generated by recent dbt job runs +- The [Metadata API](https://docs.getdbt.com/docs/dbt-cloud-apis/metadata-api?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-8nIpohDBSr7SvpXrqY-5ONmnjdIgW0XMiAPkjQTb9Pgwt24nzqAWNX2Xgtj8LA0LrPoHpD): Now in GA! 
Assess data health with the metadata generated by recent dbt job runs - [Dashboard Status Tiles](https://docs.getdbt.com/docs/dbt-cloud/using-dbt-cloud/cloud-dashboard-status-tiles?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-8nIpohDBSr7SvpXrqY-5ONmnjdIgW0XMiAPkjQTb9Pgwt24nzqAWNX2Xgtj8LA0LrPoHpD): Embed this tile anywhere iFrames live to quickly check data freshness ## New Resources  diff --git a/website/blog/2021-10-15-october-21-product-update-email.md b/website/blog/2021-10-15-october-21-product-update-email.md index b08b6de77e5..2affc5a837e 100644 --- a/website/blog/2021-10-15-october-21-product-update-email.md +++ b/website/blog/2021-10-15-october-21-product-update-email.md @@ -2,7 +2,7 @@ title: "October 2021 dbt Update: Metrics and Hat Tricks 🎩" description: "Also flagging that Coalesce is less than 3 weeks away! 😱" slug: dbt-product-update-2021-october -authors: [lauren_craigie] +authors: [lauren_craigie] tags: [dbt updates] hide_table_of_contents: false @@ -53,7 +53,7 @@ I've got three really exciting things to share this month! - [Model bottlenecks beta](https://getdbt.slack.com/archives/C02GUTGK73N?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ): Identify long-running models ripe for refactoring (or re-scheduling). The new model timing dashboard in the run detail page helps you quickly assess job composition, order, and duration to optimize your workflows and cut costs💰 - ![image-1](https://hs-8698602.f.hubspotemail.net/hub/8698602/hubfs/image-1.png?upscale=true&width=1120&upscale=true&name=image-1.png) + ![image-1](https://hs-8698602.f.hubspotemail.net/hub/8698602/hubfs/image-1.png?upscale=true&width=1120&upscale=true&name=image-1.png) The Model Timing tab in dbt Cloud highlights models taking particularly long to run. @@ -64,7 +64,7 @@ The Model Timing tab in dbt Cloud highlights models taking particularly long to ### Things to Try 🛠️ - Nearly *500* dbt Cloud accounts are using CI. Want to know why? (or maybe... *how?*) Julia breaks it down in her [latest blog](https://blog.getdbt.com/adopting-ci-cd-with-dbt-cloud/?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ) and shares how to choose and configure continuous delivery or continuous deployment at your organization. -- Hex just [launched an integration](https://hex.tech/blog/dbt-integration?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ) with dbt! It uses the [dbt Cloud Metadata API](https://docs.getdbt.com/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ) to surface metadata from dbt right in Hex, letting you quickly get the context you need on things like data freshness without juggling multiple apps and browser tabs. Get started [here](https://docs.hex.tech/connecting-to-data/configuring-data-connections/dbt-integration?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ). 
+- Hex just [launched an integration](https://hex.tech/blog/dbt-integration?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ) with dbt! It uses the [dbt Cloud Metadata API](https://docs.getdbt.com/docs/dbt-cloud-apis/metadata-api?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ) to surface metadata from dbt right in Hex, letting you quickly get the context you need on things like data freshness without juggling multiple apps and browser tabs. Get started [here](https://docs.hex.tech/connecting-to-data/configuring-data-connections/dbt-integration?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ). - The [dbt-Rockset adapter](https://github.com/rockset/dbt-rockset?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ) (now in beta) just received a major update. It now supports View, Table, Incremental, and Ephemeral materializations to help you perform real-time data transformations on Rockset. Read more [here.](https://rockset.com/blog/real-time-data-transformations-dbt-rockset?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ). ### Things to Read 📚 @@ -78,10 +78,10 @@ The Model Timing tab in dbt Cloud highlights models taking particularly long to At the Future Data Conference last week Tristan noted that data workflows borrow much from software engineering, but haven't really crossed the DevOps chasm. What's missing? Spreadsheets? Actually... *maybe.* 😅 Okay you had to be there. Luckily you still can! Check out the [recording](https://futuredata.brighttalk.live/talk/19069-506932/?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ). - [Modeling behavioral data with Snowplow and dbt](https://get.snowplowanalytics.com/wbn/dbt-and-snowplow/data-modeling/?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ) (coming up on 10/27). Our own Sanjana Sen joins the Snowplow team to talk modeling Snowplow event data in dbt -- including how to structure your data models, best practices to follow, and key pitfalls to avoid. -- [How Blend Eliminated Data Silos with dbt and Hightouch](https://hightouch.io/dbt-hightouch-blend-event/?utm_campaign=Monthly%20Product%20Updates&utm_source=hs_email&utm_medium=email&_hsenc=p2ANqtz-_wfy8vfjMjwQ7o8TXEOVz-oXI35iVcVP1HtAvriVHfJoAd1IcsP-MCww6vJUDlvAfiuQjZ) (coming up 10/28). +- How Blend Eliminated Data Silos with dbt and Hightouch. Fin-tech behemoth, Blend, processes trillions of dollars in loans (and recently IPO'd). Join this talk with William Tsu (Customer Success Operations at Blend) to learn how adopting dbt and Hightouch has helped them overcome data silos to keep kicking a$$. - + That's all for now! 
Thanks for reading, and as always, *let me know if there's anything else you want to see in these updates!* *Lauren Craigie* diff --git a/website/blog/2021-11-22-dbt-labs-pr-template.md b/website/blog/2021-11-22-dbt-labs-pr-template.md index 7ecec933704..4460b27d494 100644 --- a/website/blog/2021-11-22-dbt-labs-pr-template.md +++ b/website/blog/2021-11-22-dbt-labs-pr-template.md @@ -70,7 +70,7 @@ Checking for things like modularity and 1:1 relationships between sources and st #### Validation of models: -This section should show something to confirm that your model is doing what you intended it to do. This could be a [dbt test](https://docs.getdbt.com/docs/building-a-dbt-project/tests) like uniqueness or not null, or could be an ad-hoc query that you wrote to validate your data. Here is a screenshot from a test run on a local development branch: +This section should show something to confirm that your model is doing what you intended it to do. This could be a [dbt test](/docs/build/tests) like uniqueness or not null, or could be an ad-hoc query that you wrote to validate your data. Here is a screenshot from a test run on a local development branch: ![test validation](/img/blog/pr-template-test-validation.png "dbt test validation") diff --git a/website/blog/2021-11-22-primary-keys.md b/website/blog/2021-11-22-primary-keys.md index fe29d477260..84c92055eb0 100644 --- a/website/blog/2021-11-22-primary-keys.md +++ b/website/blog/2021-11-22-primary-keys.md @@ -51,7 +51,7 @@ In the days before testing your data was commonplace, you often found out that y ## How to test primary keys with dbt -Today, you can add two simple [dbt tests](https://docs.getdbt.com/docs/building-a-dbt-project/tests) onto your primary keys and feel secure that you are going to catch the vast majority of problems in your data. +Today, you can add two simple [dbt tests](/docs/build/tests) onto your primary keys and feel secure that you are going to catch the vast majority of problems in your data. Not surprisingly, these two tests correspond to the two most common errors found on your primary keys, and are usually the first tests that teams testing data with dbt implement: @@ -89,7 +89,7 @@ Having tests configured and running in production using the [`dbt test`](https:/ Does your warehouse even _support_ primary keys at all? If it does, how can you actually find out if a table has a primary key set, and what that primary key is? -Let’s walk through primary key support + access across the major cloud data warehouse platforms. +Let’s walk through primary key support + access across the major cloud data warehouse platforms. ### TL;DR on primary key support across warehouses diff --git a/website/blog/2021-11-22-sql-surrogate-keys.md b/website/blog/2021-11-22-sql-surrogate-keys.md index c9cbabeb16f..05422ba3506 100644 --- a/website/blog/2021-11-22-sql-surrogate-keys.md +++ b/website/blog/2021-11-22-sql-surrogate-keys.md @@ -156,7 +156,7 @@ output: | `null` | 123 | \|123 | -Let’s take a look at how generating surrogate keys specifically looks in practice across data warehouses, and how you can use one simple dbt macro ([dbt_utils.surrogate_key](https://github.com/dbt-labs/dbt-utils#surrogate_key-source)) to abstract away the null value problem. +Let’s take a look at how generating surrogate keys specifically looks in practice across data warehouses, and how you can use one simple dbt macro ([dbt_utils.surrogate_key](https://github.com/dbt-labs/dbt-utils#surrogate_key-source)) to abstract away the null value problem.
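Circling back to the two primary-key tests mentioned above, here is a minimal sketch of how they are typically declared in a model's `.yml` file (the model and column names are hypothetical):

```yaml
version: 2

models:
  - name: orders          # hypothetical model
    columns:
      - name: order_id    # the primary key
        tests:
          - unique
          - not_null
```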
### A surrogate_key macro to the rescue diff --git a/website/blog/2021-11-23-how-to-upgrade-dbt-versions.md b/website/blog/2021-11-23-how-to-upgrade-dbt-versions.md index 954bca83925..87b3ea7bd1e 100644 --- a/website/blog/2021-11-23-how-to-upgrade-dbt-versions.md +++ b/website/blog/2021-11-23-how-to-upgrade-dbt-versions.md @@ -62,7 +62,7 @@ As noted above, the project is on 0.16.0 right now. 0.17.2 is the final patch re > > Practically, it also lets you lock in "checkpoints" of known-stable setups. If you need to pause your migration work to deal with an urgent request, you can safely deploy what you've finished so far instead of having a bunch of unrelated half-finished changes. -Review the migration guides to get an initial indication of what changes you might need to make. For example, in [the migration guide for 0.17.0](https://docs.getdbt.com/docs/guides/migration-guide/upgrading-to-0-17-0), there are several significant changes to dbt's functionality, but it's unlikely that all of them will apply to your project. We'll cover this more later. +Review the migration guides to get an initial indication of what changes you might need to make. For example, in [the migration guide for 0.17.0](/guides/migration/versions), there are several significant changes to dbt's functionality, but it's unlikely that all of them will apply to your project. We'll cover this more later. ## Step 2: `Add require-dbt-version` to your `dbt_project.yml` file. @@ -88,7 +88,7 @@ You can add an upper bound of supported versions like this: `[">=0.20.0", "<=1.0 ## Step 3: Upgrade dbt -If you use dbt Cloud, you can upgrade [as described here](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version). Take note of the recommendation to create a second "sandbox" project, so that your experimentation doesn’t impact the rest of the team. For dbt Core, upgrade instructions will vary based on your [original installation method](https://docs.getdbt.com/dbt-cli/installation). +If you use dbt Cloud, you can upgrade [as described here](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version). We recommend that you [create a second "sandbox" project](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions#testing-your-changes-before-upgrading), so that your experimentation doesn’t impact the rest of the team. For dbt Core, upgrade instructions will vary based on your [original installation method](https://docs.getdbt.com/dbt-cli/installation). ## Step 4: Try to run `dbt compile` @@ -126,9 +126,9 @@ In this case, our example project probably has dbt 0.3.0 installed. By reviewing ### Step 5b. Fix errors, then warnings -Obviously, errors that stop you from running your dbt project at all are the most important to deal with. Let's assume that our project used a too-broadly-scoped variable in a macro file, support for which was removed in v0.17. The [migration guide explains what to do instead](https://docs.getdbt.com/docs/guides/migration-guide/upgrading-to-0-17-0#macros-no-longer-see-variables-defined-outside-of-macro-blocks), and it's a pretty straightforward fix. +Obviously, errors that stop you from running your dbt project at all are the most important to deal with. Let's assume that our project used a too-broadly-scoped variable in a macro file, support for which was removed in v0.17. The [migration guide explains what to do instead](/guides/migration/versions), and it's a pretty straightforward fix. 
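To make the Step 2 pin concrete, here is a hedged sketch of the relevant `dbt_project.yml` lines, with bounds chosen to match the 0.17.x target in this walkthrough:

```yaml
# dbt_project.yml
# lock the project to the range of dbt versions you have actually tested
require-dbt-version: [">=0.17.0", "<0.18.0"]
```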
- +Once your errors are out of the way, have a look at warnings. For example, 0.17 introduced `config-version: 2` to `dbt_project.yml`. Although it's backwards compatible for now, we know that support for the old version will be removed in a future version of dbt so we might as well deal with it now. Again, the migration guide explains [what we need to do](/guides/migration/versions), and how to take full advantage of the new functionality in the future. ### Stay focused diff --git a/website/blog/2021-11-23-so-you-want-to-build-a-package.md b/website/blog/2021-11-23-so-you-want-to-build-a-package.md index 744166a1674..c8df47d04ce 100644 --- a/website/blog/2021-11-23-so-you-want-to-build-a-package.md +++ b/website/blog/2021-11-23-so-you-want-to-build-a-package.md @@ -25,7 +25,7 @@ If you’re considering making a package, you probably already know what one is Packages are a way to share code in dbt without ever having to copy and paste (or *email* :screaming face:). -Let’s break down the [dateadd macro](https://github.com/dbt-labs/dbt-utils/blob/main/macros/cross_db_utils/dateadd.sql) from the dbt_utils macro to show you the process that created this fantastic macro. +Let’s break down the [dateadd macro](https://github.com/dbt-labs/dbt-utils/blob/0.1.20/macros/cross_db_utils/dateadd.sql) from the dbt_utils package to show you the process that created this fantastic macro. The problem: Analysts often need to add an interval to a timestamp/date. To make this cross-database and standardized across a project, a macro is needed. @@ -39,7 +39,7 @@ The problem: Analysts often need to add an interval to a timestamp/date. To make ``` -In this section, we are using the [dispatch](https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch) Jinja reference to enable the right macro from the rest of the file is called (since they are specific to the adapter) when a user called the macro. This means the user doesn’t have to think about what to call based on the adapter, they just need to call one macro, dbt handles it all behind the scene. +In this section, we are using the [dispatch](/reference/dbt-jinja-functions/dispatch) Jinja reference to ensure that the right macro from the rest of the file is called (since they are specific to the adapter) when a user calls the macro. This means the user doesn’t have to think about what to call based on the adapter; they just need to call one macro, and dbt handles it all behind the scenes.
```sql diff --git a/website/blog/2021-11-29-dbt-airflow-spiritual-alignment.md b/website/blog/2021-11-29-dbt-airflow-spiritual-alignment.md index f3092b766cd..9edcb84fd4f 100644 --- a/website/blog/2021-11-29-dbt-airflow-spiritual-alignment.md +++ b/website/blog/2021-11-29-dbt-airflow-spiritual-alignment.md @@ -22,7 +22,7 @@ In my experience, these are false dichotomies, that sound great as hot takes but -In my days as a data consultant and now as a member of the dbt Labs Solutions Architecture team, I’ve frequently seen Airflow, dbt Core & dbt Cloud ([via the API](https://docs.getdbt.com/dbt-cloud/api-v2)) blended as needed, based on the needs of a specific data pipeline, or a team’s structure and skillset. +In my days as a data consultant and now as a member of the dbt Labs Solutions Architecture team, I’ve frequently seen Airflow, dbt Core & dbt Cloud ([via the official provider](https://registry.astronomer.io/providers/dbt-cloud?type=Operators&utm_campaign=Monthly+Product+Updates&utm_medium=email&_hsmi=208603877&utm_content=208603877&utm_source=hs_email)) blended as needed, based on the needs of a specific data pipeline, or a team’s structure and skillset. More fundamentally, I think it’s important to call out that Airflow + dbt are **spiritually aligned** in purpose. They both exist to facilitate clear communication across data teams, in service of producing trustworthy data. @@ -90,8 +90,8 @@ So instead of getting bogged down in defining roles, let’s focus on hard skill The common skills needed for implementing any flavor of dbt (Core or Cloud) are: * SQL: ‘nuff said -* YAML: required to generate config files for [writing tests on data models](/docs/building-a-dbt-project/tests) -* [Jinja](/tutorial/learning-more/using-jinja): allows you to write DRY code (using [macros](/docs/building-a-dbt-project/jinja-macros), for loops, if statements, etc) +* YAML: required to generate config files for [writing tests on data models](/docs/build/tests) +* [Jinja](/guides/getting-started/learning-more/using-jinja): allows you to write DRY code (using [macros](/docs/building-a-dbt-project/jinja-macros), for loops, if statements, etc) YAML + Jinja can be learned pretty quickly, but SQL is the non-negotiable you’ll need to get started. @@ -123,8 +123,6 @@ When a dbt run fails within an Airflow pipeline, an engineer monitoring the over dbt provides common programmatic interfaces (the [dbt Cloud Admin + Metadata APIs](/docs/dbt-cloud/dbt-cloud-api/cloud-apis), and [.json-based artifacts](/reference/artifacts/dbt-artifacts) in the case of dbt Core) that provide the context needed for the engineer to self-serve—either by rerunning from a point of failure or reaching out to the owner. -![dbt run log](/img/blog/airflow-dbt-run-log.png "dbt run log") - ## Why I ❤️ dbt Cloud + Airflow dbt Core is a fantastic framework for developing data transformation + testing logic. It is less fantastic as a shared interface for data analysts + engineers to collaborate **_on production runs of transformation jobs_**. @@ -147,7 +145,7 @@ This can be perfectly ok, in the event your data team is structured for data eng Once the data has been ingested, dbt Core can be used to model it for consumption. 
Most of the time, users choose to either: Use the dbt CLI+ [BashOperator](https://registry.astronomer.io/providers/apache-airflow/modules/bashoperator) with Airflow (If you take this route, you can use an external secrets manager to manage credentials externally), or -Use the [KubernetesPodOperator](https://registry.astronomer.io/providers/kubernetes/modules/kubernetespodoperator) for each dbt job, as data teams have at places like [Gitlab](https://gitlab.com/gitlab-data/analytics/-/blob/master/dags/transformation/dbt_trusted_data.py#L72) and [Snowflake](https://www.snowflake.com/blog/migrating-airflow-from-amazon-ec2-to-kubernetes/). +Use the [KubernetesPodOperator](https://registry.astronomer.io/providers/kubernetes/modules/kubernetespodoperator) for each dbt job, as data teams have at places like [Gitlab](https://gitlab.com/gitlab-data/analytics/-/blob/master/dags/transformation/dbt_trusted_data.py#L72) and [Snowflake](https://www.snowflake.com/blog/migrating-airflow-from-amazon-ec2-to-kubernetes/). Both approaches are equally valid; the right one will depend on the team and use case at hand. @@ -191,29 +189,11 @@ This means that whether you’re actively developing or you simply want to rerun ### dbt Cloud + Airflow -With dbt Cloud and its aforementioned [APIs](https://docs.getdbt.com/docs/dbt-cloud/dbt-cloud-api/cloud-apis), any dbt user can configure dbt runs from the UI. - -In Airflow, engineers can then call the API, and everyone can move on with their lives. This allows the API to be a programmatic interface between analysts and data engineers, vs relying on the human interface. - -If you look at what this practically looks like in code (my [airflow-toolkit repo is here](https://github.com/sungchun12/airflow-toolkit/blob/demo-sung/dags/examples/dbt_cloud_example.py)), just a few settings need to be configured after you create the initial python API call: [here](https://github.com/sungchun12/airflow-toolkit/blob/95d40ac76122de337e1b1cdc8eed35ba1c3051ed/dags/dbt_cloud_utils.py) - -``` - -dbt_cloud_job_runner_config = dbt_cloud_job_runner( - - account_id=4238, project_id=12220, job_id=12389, cause=dag_file_name - -) - -``` - -If the operator fails, it’s an Airflow problem. If the dbt run returns a model or test failure, it’s a dbt problem and the analyst can be notified to hop into the dbt Cloud UI to debug. - -#### Using the new dbt Cloud Provider +#### Using the dbt Cloud Provider With the new dbt Cloud Provider, you can use Airflow to orchestrate and monitor your dbt Cloud jobs without any of the overhead of dbt Core. Out of the box, the dbt Cloud provider comes with: -An operator that allows you to both [run a predefined job in dbt Cloud and download an artifact from a dbt Cloud job](https://registry.astronomer.io/dags/example-dbt-cloud). +An operator that allows you to both run a predefined job in dbt Cloud and download an artifact from a dbt Cloud job. A hook that gives you a secure way to leverage Airflow’s connection manager to connect to dbt Cloud. The Operator leverages the hook, but you can also [use the hook directly in a Taskflow function or PythonOperator](https://registry.astronomer.io/dags/dbt-cloud-operational-check) if there’s custom logic you need that isn’t covered in the Operator. A sensor that allows you to poll for a job completion. You can use this [for workloads where you want to ensure your dbt job has run before continuing on with your DAG](https://registry.astronomer.io/dags/fivetran-dbt-cloud-census). 
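To ground the provider description above, here is a hedged sketch of a minimal DAG that triggers a dbt Cloud job. The connection ID and job ID are placeholders, and the operator's parameter names are assumptions based on the provider's documented API:

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.dbt.cloud.operators.dbt import DbtCloudRunJobOperator

with DAG(
    dag_id="trigger_dbt_cloud_job",
    start_date=datetime(2021, 12, 1),
    schedule_interval="@daily",
    catchup=False,
) as dag:
    run_dbt_job = DbtCloudRunJobOperator(
        task_id="run_dbt_cloud_job",
        dbt_cloud_conn_id="dbt_cloud",  # assumed Airflow connection name
        job_id=12389,                   # hypothetical dbt Cloud job ID
        check_interval=60,              # poll the job status every minute
        timeout=3600,                   # fail the task if the job runs over an hour
    )
```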
@@ -221,26 +201,7 @@ TL;DR - This combines the end-to-end visibility of everything (from ingestion th #### Setting up Airflow and dbt Cloud -To set up Airflow and dbt Cloud, you can: - - -1. Set up a dbt Cloud job, as in the example below. - -![job settings](/img/blog/2021-11-29-dbt-airflow-spiritual-alignment/job-settings.png) - -2. Set up an Airflow Connection ID - -![airflow dbt run select](/img/blog/2021-11-29-dbt-airflow-spiritual-alignment/airflow-connection-ID.png) - -3. Set up your Airflow DAG similar to [this example](https://github.com/apache/airflow/blob/main/airflow/providers/dbt/cloud/example_dags/example_dbt_cloud.py). - -4. You can use Airflow to call the dbt Cloud API via the new `DbtCloudRunJobOperator` to run the job and monitor it in real time through the dbt Cloud interface. - -![dbt Cloud API graph](/img/blog/2021-11-29-dbt-airflow-spiritual-alignment/dbt-Cloud-API-graph.png) - -![Monitor Job Runs](/img/blog/2021-11-29-dbt-airflow-spiritual-alignment/Monitor-Job-Runs.png) - -![run number](/img/blog/2021-11-29-dbt-airflow-spiritual-alignment/run-number.png) +To set up Airflow and dbt Cloud, you can follow the step-by-step instructions [here](https://docs.getdbt.com/guides/orchestration/airflow-and-dbt-cloud/2-setting-up-airflow-and-dbt-cloud) If your task errors or fails in any of the above use cases, you can view the logs within dbt Cloud (think: data engineers can trust analytics engineers to resolve errors). diff --git a/website/blog/2021-11-29-open-source-community-growth.md b/website/blog/2021-11-29-open-source-community-growth.md index ac1b0381f85..a61fa7ac46b 100644 --- a/website/blog/2021-11-29-open-source-community-growth.md +++ b/website/blog/2021-11-29-open-source-community-growth.md @@ -465,7 +465,7 @@ A lineage graph of the entire pipeline can now be viewed in Marquez, which shows This is the simplest part, by far. Since we have a set of tables with clearly-defined measures and dimensions, getting everything working in a system like Apache Superset is straightforward. -Configuring the data source and adding each table to a Preset Workspace was easy. First, I connected my BigQuery database by uploading a JSON key for my service account. +Configuring the data source and adding each table to a Preset Workspace was easy. First, I connected my BigQuery database by uploading a JSON key for my service account. Once the database connection was in place, I created datasets for each of my `*_daily_summary` tables by selecting the database/schema/table from a dropdown. diff --git a/website/blog/2021-11-29-the-missing-role-of-design-in-analytics.md b/website/blog/2021-11-29-the-missing-role-of-design-in-analytics.md index 8487cd601e1..94338cbff5c 100644 --- a/website/blog/2021-11-29-the-missing-role-of-design-in-analytics.md +++ b/website/blog/2021-11-29-the-missing-role-of-design-in-analytics.md @@ -12,18 +12,18 @@ date: 2021-11-29 is_featured: false --- -If you’ve spoken to me lately, follow me on [Twitter](https://twitter.com/sethrosen), or have taken my order at [Wendy’s](https://i.ytimg.com/vi/_oMc4eD9-XM/maxresdefault.jpg), you probably know how much I hate traditional dashboards.
My dad, a psychotherapist, has been working with me to get to the root of my upbringing that led to this deep-rooted feeling.

-As it turns out, the cause of my feelings towards traditional dashboarding are actually quite obvious. Before entering the field of data, I spent my entire career as a product manager working alongside user experience designers and engineers on cross-functional product teams.
+As it turns out, the cause of my feelings towards traditional dashboarding is actually quite obvious. Before entering the field of data, I spent my entire career as a product manager working alongside user experience designers and engineers on cross-functional product teams.

-When building software, getting users to actually use the product is no easy feat. The smallest amount of friction can cause a user to abandon the flow. Add enough friction to any product and users and engagement will drop dramatically. As analysts, we know this intuitively. We are constantly measuring retention, cohorts, and engagement within our business.
+When building software, getting users to actually use the product is no easy feat. The smallest amount of friction can cause a user to abandon the flow. Add enough friction to any product and user engagement will drop dramatically. As analysts, we know this intuitively. We are constantly measuring retention, cohorts, and engagement within our business.

-These principles also apply to analytics. The more friction there is in analytics and the less we focus on the user, the less our output will be used. So it makes me wonder why, within the field of data, is design thinking often so absent?
+These principles also apply to analytics. The more friction there is in analytics and the less we focus on the user, the less our output will be used. So it makes me wonder why, within the field of data, is design thinking often so absent?

-## Why are we lacking design thinking in analytics?
+## Why are we lacking design thinking in analytics?

Painting with broad strokes, design is generally not something that is a top priority for data teams. There are a few primary reasons that I see off the top:

@@ -35,7 +35,7 @@ Painting with broad strokes, design is generally not something that is a top pri

These more or less boil down to data teams running like service teams rather than [product teams](https://locallyoptimistic.com/post/run-your-data-team-like-a-product-team/)—when you always give the squeakiest wheel the grease, it’s impossible to put in the strategic effort that design thinking requires.

-One solution I proposed back in 2019 is hiring a [data product manager](https://www.linkedin.com/pulse/why-your-organization-may-need-data-product-manager-seth-rosen/), which seems to be picking up a bit of steam. But what would that person actually do?
+One solution I proposed back in 2019 is hiring a [data product manager](https://www.hashpath.com/2019/11/why-your-organization-may-need-a-data-product-manager/), which seems to be picking up a bit of steam. But what would that person actually do?

I have attempted to sum the solutions to these complex issues into a table of overly-simplified do’s and don’ts:

@@ -85,9 +85,9 @@ An analyst who can gather the necessary data, transform it into the analytics-re

If you think about the workflow for an analyst, a simple process might go something like this:

1. Initial exploratory analysis and ad-hoc queries
-2. Model data in dbt
+2. Model data in dbt
3. Build out data visualizations
-4. Write tests/monitor performance
+4. Write tests/monitor performance
But, you might also consider adding the following to your workflow.

@@ -140,7 +140,7 @@ Here are a few examples:

- Minimal software development lifecycle
+ Minimal software development lifecycle

Strong SDLC to promote user trust

@@ -176,17 +176,17 @@ While the data being presented in the weather app could technically be presented

When it is done right, the user has everything they need to make quick decisions and take appropriate actions.

-It’s worth noting this simple weather app is purpose-built for everyday weather situations. There are some use cases where highly specific information may be needed by a particular subset of users.
+It’s worth noting this simple weather app is purpose-built for everyday weather situations. There are some use cases where highly specific information may be needed by a particular subset of users.

For example, surfers want different information. Ultimately, they may want an overall "Surf or don't surf" recommendation. Additionally, pilots could never simply rely on AccuWeather. These use cases warrant their own user-centered, purpose-built experiences.

## The missing piece of the puzzle

-While a design process can help you build better analytics output, there is still a missing part of the analytics stack to enable true user-centered design.
+While a design process can help you build better analytics output, there is still a missing part of the analytics stack to enable true user-centered design.

-How would you, today, build out a weather-like application?
+How would you, today, build out a weather-like application?

-Traditional dashboarding tools limit the user experience and prevent purpose-built applications from being created. Luckily our tools are evolving to meet the needs of the data consumer.
+Traditional dashboarding tools limit the user experience and prevent purpose-built applications from being created. Luckily our tools are evolving to meet the needs of the data consumer.

I encourage you to explore these new tools as much as possible and to work design into your analytics workflows. I’m always up for chatting, especially about this - my DMs are always open on [Twitter](https://twitter.com/sethrosen).

diff --git a/website/blog/2021-12-05-how-to-build-a-mature-dbt-project-from-scratch.md b/website/blog/2021-12-05-how-to-build-a-mature-dbt-project-from-scratch.md
index f0154a8e17d..7149f5a49b8 100644
--- a/website/blog/2021-12-05-how-to-build-a-mature-dbt-project-from-scratch.md
+++ b/website/blog/2021-12-05-how-to-build-a-mature-dbt-project-from-scratch.md
@@ -53,9 +53,9 @@ Let’s pretend that we are an analytics engineer at Seeq Wellness, a hypothetic

**Key Outcomes**

-* Create your first [model](https://docs.getdbt.com/docs/building-a-dbt-project/building-models)
+* Create your first [model](/docs/building-a-dbt-project/building-models)

-* Execute your first [dbt run](https://docs.getdbt.com/reference/commands/run)
+* Execute your first [dbt run](/reference/commands/run)

![image alt text](/img/blog/building-a-mature-dbt-project-from-scratch/image_1.png)

@@ -67,13 +67,13 @@ The goal here is to learn the very basics of interacting with a dbt project; fee

In addition to learning the basic pieces of dbt, we're familiarizing ourselves with the modern, version-controlled analytics engineering workflow, and experimenting with how it feels to use it at our organization.

-If we decide not to do this, we end up missing out on what the dbt workflow has to offer.
If you want to learn more about why we think analytics engineering with dbt is the way to go, I encourage you to read the [dbt Viewpoint](https://docs.getdbt.com/docs/about/viewpoint)! +If we decide not to do this, we end up missing out on what the dbt workflow has to offer. If you want to learn more about why we think analytics engineering with dbt is the way to go, I encourage you to read the [dbt Viewpoint](/docs/about/viewpoint)! -In order to learn the basics, we’re going to [port over the SQL file](https://docs.getdbt.com/tutorial/refactoring-legacy-sql) that powers our existing "patient_claim_summary" report that we use in our KPI dashboard in parallel to our old transformation process. We’re not ripping out the old plumbing just yet. In doing so, we're going to try dbt on for size and get used to interfacing with a dbt project. +In order to learn the basics, we’re going to [port over the SQL file](/docs/get-started/learning-more/refactoring-legacy-sql) that powers our existing "patient_claim_summary" report that we use in our KPI dashboard in parallel to our old transformation process. We’re not ripping out the old plumbing just yet. In doing so, we're going to try dbt on for size and get used to interfacing with a dbt project. **Project Appearance** -We have one single SQL model in our models folder, and really, that's it. At this stage, the README and dbt_project.yml are just artifacts from the [dbt init command](https://www.google.com/url?q=https://docs.getdbt.com/reference/commands/init&sa=D&source=docs&ust=1636059050382000&usg=AOvVaw3spuGmh9--dlfaGWanf0eZ), and don’t yet have specific documentation or configuration. At this stage of our journey, we just want to get up and running with a functional dbt project. +We have one single SQL model in our models folder, and really, that's it. At this stage, the README and dbt_project.yml are just artifacts from the [dbt init command](/reference/commands/init), and don’t yet have specific documentation or configuration. At this stage of our journey, we just want to get up and running with a functional dbt project. 
![image alt text](/img/blog/building-a-mature-dbt-project-from-scratch/image_2.png)

@@ -83,11 +83,11 @@ The most important thing we’re introducing when your project is an infant is t

**Key Outcomes**

-* Configure your first [sources](https://docs.getdbt.com/docs/building-a-dbt-project/using-sources)
+* Configure your first [sources](/docs/building-a-dbt-project/using-sources)

-* Introduce modularity with [{{ ref() }}](https://docs.getdbt.com/reference/dbt-jinja-functions/ref) and [{{ source() }}](https://docs.getdbt.com/reference/dbt-jinja-functions/source)
+* Introduce modularity with [{{ ref() }}](/reference/dbt-jinja-functions/ref) and [{{ source() }}](/reference/dbt-jinja-functions/source)

-* [Document](https://docs.getdbt.com/docs/building-a-dbt-project/documentation) and [test](https://docs.getdbt.com/docs/building-a-dbt-project/tests) your first models
+* [Document](/docs/building-a-dbt-project/documentation) and [test](/docs/build/tests) your first models

![image alt text](/img/blog/building-a-mature-dbt-project-from-scratch/image_3.png)

@@ -99,9 +99,9 @@ Specifically, now is when it's useful to introduce **_modularity_** to our proje

We’re going to:

-* Break out reused code into separate models and use [{{ ](https://docs.getdbt.com/reference/dbt-jinja-functions/ref)[ref](https://docs.getdbt.com/reference/dbt-jinja-functions/ref)[() }}](https://docs.getdbt.com/reference/dbt-jinja-functions/ref) to build dependencies
+* Break out reused code into separate models and use [{{ ref() }}](/reference/dbt-jinja-functions/ref) to build dependencies

-* Use the[ {{ source() }}](https://docs.getdbt.com/reference/commands/source) macro to declare our raw data dependencies
+* Use the [{{ source() }}](/reference/dbt-jinja-functions/source) macro to declare our raw data dependencies

* Dip our toes into testing and documenting our models

@@ -125,7 +125,7 @@ Leveling up from infant to toddler is a huge jump in terms of feature completene

* Create a PR template to ensure quality and consistency

-* [Deploy your project](https://docs.getdbt.com/docs/running-a-dbt-project/running-dbt-in-production)!
+* [Deploy your project](/docs/running-a-dbt-project/running-dbt-in-production)!

![image alt text](/img/blog/building-a-mature-dbt-project-from-scratch/image_5.png)

@@ -151,11 +151,11 @@ Even though we haven't changed the function of a lot of our features *codifying

**Key Outcomes**

-* Leverage code from dbt [packages](https://docs.getdbt.com/docs/building-a-dbt-project/package-management)
+* Leverage code from dbt [packages](/docs/building-a-dbt-project/package-management)

* Increase model flexibility and scope of project

-* Reduce dbt production build times with [advanced materializations](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/materializations)
+* Reduce dbt production build times with [advanced materializations](/docs/building-a-dbt-project/building-models/materializations)

![image alt text](/img/blog/building-a-mature-dbt-project-from-scratch/image_7.png)

@@ -179,7 +179,7 @@ We've spent this level focused on deepening and optimizing our feature set — w

**Key Outcomes**

-* Formalize dbt’s relationship to BI with [exposures](https://docs.getdbt.com/docs/building-a-dbt-project/exposures)!
+* Formalize dbt’s relationship to BI with [exposures](/docs/build/exposures)!
* Advanced use of metadata @@ -193,7 +193,7 @@ At this point, like we started to do in adolescence, we are going to focus on th We see the biggest jump from the previous stage in the [macros folder](https://github.com/dbt-labs/dbt-project-maturity/tree/main/5-adulthood/macros). By introducing advanced macros that go beyond simple SQL templating, we’re able to have dbt deepen its relationship to our warehouse. Now we can have dbt manage things like custom schema behavior, run post hooks to drop retired models and dynamically orchestrate object permission controls; dbt itself can become your command post for warehouse management. -Additionally, we’ve added an exposures file to formally define the use of our marts models in our BI tool. Exposures are the most mature way to declare the data team's contracts with data consumers. We now have close to end-to-end awareness of the data lineage — we know what data our project depends on, whether it's fresh, how it is transformed in our dbt models, and finally where it’s consumed in reports. Now, we can also know which of our key reports are impacted if and when we hit an error at any point in this flow. +Additionally, we’ve added an exposures file to formally define the use of our marts models in our BI tool. Exposures are the most mature way to declare the data team's contracts with data consumers. We now have close to end-to-end awareness of the data lineage — we know what data our project depends on, whether it's fresh, how it is transformed in our dbt models, and finally where it’s consumed in reports. Now, we can also know which of our key reports are impacted if and when we hit an error at any point in this flow. That end to end awareness is visible on the DAG too — we can see the dashboard we declared in our exposures file here in orange! diff --git a/website/blog/2022-02-07-customer-360-view-census-playbook.md b/website/blog/2022-02-07-customer-360-view-census-playbook.md index a6840ed2745..b3681edc1cd 100644 --- a/website/blog/2022-02-07-customer-360-view-census-playbook.md +++ b/website/blog/2022-02-07-customer-360-view-census-playbook.md @@ -30,7 +30,7 @@ In short, a jaffle is: *See above: Tasty, tasty jaffles.* -Jaffle Shop is a demo repo referenced in [dbt’s Getting Started tutorial](https://docs.getdbt.com/tutorial/getting-started), and its jaffles hold a special place in the dbt community’s hearts, as well as on Data Twitter™. +Jaffle Shop is a demo repo referenced in [dbt’s Getting Started Guide](/docs/get-started/getting-started/overview), and its jaffles hold a special place in the dbt community’s hearts, as well as on Data Twitter™. ![jaffles on data twitter](/img/blog/2022-02-08-customer-360-view/image_1.png) @@ -75,7 +75,7 @@ JaffleGaggle has to keep track of information about their interactions with thei All of these questions require aggregating + syncing data from application usage, workspace information, and orders into the CRM for the sales team to have at their fingertips. -This aggregation process requires an analytics warehouse, as all of these things need to be synced together outside of the application database itself to incorporate other data sources (billing / events information, past touchpoints in the CRM, etc). Thus, we can create our fancy customer 360 within JaffleGaggle’s data warehouse, which is a standard project for a B2B company’s data team. 
+This aggregation process requires an analytics warehouse, as all of these things need to be synced together outside of the application database itself to incorporate other data sources (billing / events information, past touchpoints in the CRM, etc). Thus, we can create our fancy customer 360 within JaffleGaggle’s data warehouse, which is a standard project for a B2B company’s data team.

**Diving into data modeling**

diff --git a/website/blog/2022-02-23-founding-an-AE-team-smartsheet.md b/website/blog/2022-02-23-founding-an-AE-team-smartsheet.md
index 5e65586c7d4..a6e80f9ccdd 100644
--- a/website/blog/2022-02-23-founding-an-AE-team-smartsheet.md
+++ b/website/blog/2022-02-23-founding-an-AE-team-smartsheet.md
@@ -30,7 +30,7 @@ Enter this story. I’m Nate and I manage the Analytics Engineering team at [Sma

## State of Analytics Before Analytics Engineering

-Smartsheet, in general, has a great analytics setup. Strong data engineering and data analytics teams. A cloud data warehouse and an on-prem BI tool for front-end data visibility.  However, even with that foundation, there were some limitations under the hood requiring action:
+Smartsheet, in general, has a great analytics setup. Strong data engineering and data analytics teams. A cloud data warehouse and an on-prem BI tool for front-end data visibility. However, even with that foundation, there were some limitations under the hood requiring action:

### (1) Multiple undocumented transformation databases

@@ -174,7 +174,7 @@ Plenty more to make but the foundation was there, and now it was time to enroll

This topic also deserves its own blog post, but I’ll attempt to quickly describe what we changed. As described in the problem statement, analysts could only ship code every 1-2 weeks and the process was incredibly painful. It was more than 16 steps with multiple ‘gotchas’ where analysts would get stuck and waste time.

-The process is now six steps, with massive time savings for the analytics team both in the creation and testing of their script along with how long it takes for new data to get into the data warehouse.This is on top of all of dbt’s benefits such as data definitions, lineage, automatic testing, macros…the list goes on.
+The process is now six steps, with massive time savings for the analytics team both in the creation and testing of their script along with how long it takes for new data to get into the data warehouse. This is on top of all of dbt’s benefits such as data definitions, lineage, automatic testing, macros…the list goes on.

![Graphic depicting Smartsheet's new and improved code production process that outlines 6 steps](/img/blog/2022-02-23-founding-an-AE-team-smartsheet/completed-process.png)

diff --git a/website/blog/2022-04-14-add-ci-cd-to-bitbucket.md b/website/blog/2022-04-14-add-ci-cd-to-bitbucket.md
index 36c25a8653a..6cdb20d0df3 100644
--- a/website/blog/2022-04-14-add-ci-cd-to-bitbucket.md
+++ b/website/blog/2022-04-14-add-ci-cd-to-bitbucket.md
@@ -183,7 +183,7 @@ Reading the file over, you can see that we:

3. Specify that this pipeline is a two-step process
4. Specify that in the first step called “Deploy to production”, we want to:
   1. Use whatever pip cache is available, if any
-   2. Keep whatever JSON files are generated in this step in target/
+   2. Keep whatever files are generated in this step in target/
   3. Run the dbt setup by first installing dbt as defined in requirements.txt, then adding `profiles.yml` to the location dbt expects them in, and finally running `dbt deps` to install any dbt packages
   4. Run `dbt seed`, `run`, and `snapshot`, all with `prod` as the specified target
5. Specify that in the second step called “Upload artifacts for slim CI runs”, we want to use the Bitbucket “pipe” (pre-defined action) to authenticate with environment variables and upload all files that match the glob `target/*.json`.

diff --git a/website/blog/2022-04-19-complex-deduplication.md b/website/blog/2022-04-19-complex-deduplication.md
index fdb3cf1f60d..613b63c04b8 100644
--- a/website/blog/2022-04-19-complex-deduplication.md
+++ b/website/blog/2022-04-19-complex-deduplication.md
@@ -15,7 +15,7 @@ Let’s get rid of these dupes and send you on your way to do the rest of the *s

-You’re here because your duplicates are *special* duplicates. These special dupes are not the basic ones that have same exact values in every column and duplicate primary keys that can be easily fixed by haphazardly throwing in a `distinct` (yeah that’s right, I called using `distinct` haphazard!). These are *partial* duplicates, meaning your entity of concern's primary key is not unique *on purpose* (or perhaps you're just dealing with some less than ideal data syncing). You may be capturing historical, type-two slowly changing dimensional data, or incrementally building a table with an append-only strategy, because you actually want to capture some change over time for the entity your recording. (Or, as mentioned, your loader may just be appending data indiscriminately on a schedule without much care for your time and sanity.) Whatever has brought you here, you now have a table where the is not your entity’s primary key, but instead the entity’s primary key + the column values that you’re tracking. Confused? Let’s look at an example.
+You’re here because your duplicates are *special* duplicates. These special dupes are not the basic ones that have the same exact values in every column and duplicate primary keys that can be easily fixed by haphazardly throwing in a `distinct` (yeah that’s right, I called using `distinct` haphazard!). These are *partial* duplicates, meaning your entity of concern's primary key is not unique *on purpose* (or perhaps you're just dealing with some less than ideal data syncing). You may be capturing historical, type-two slowly changing dimensional data, or incrementally building a table with an append-only strategy, because you actually want to capture some change over time for the entity you're recording. (Or, as mentioned, your loader may just be appending data indiscriminately on a schedule without much care for your time and sanity.) Whatever has brought you here, you now have a table where the grain is not your entity’s primary key, but instead the entity’s primary key + the column values that you’re tracking. Confused? Let’s look at an example.

Here’s your raw table:

@@ -152,7 +152,7 @@ select * from filter_real_diffs

> *What happens in this step? You check your data because you are thorough!*

-Good thing dbt has already built this for you. Add a [unique test](https://docs.getdbt.com/docs/building-a-dbt-project/tests#generic-tests) to your YAML model block for your `grain_id` in this de-duped staging model, and give it a dbt test!
+Good thing dbt has already built this for you. Add a [unique test](/docs/build/tests#generic-tests) to your YAML model block for your `grain_id` in this de-duped staging model, and give it a dbt test!
```yaml models: diff --git a/website/blog/2022-04-19-dbt-cloud-postman-collection.md b/website/blog/2022-04-19-dbt-cloud-postman-collection.md index 51d68b974ec..7ea81e89181 100644 --- a/website/blog/2022-04-19-dbt-cloud-postman-collection.md +++ b/website/blog/2022-04-19-dbt-cloud-postman-collection.md @@ -15,7 +15,7 @@ is_featured: true >This is for advanced users of dbt Cloud that are interested in expanding their knowledge of the dbt API via an interactive Postman Collection. We only suggest diving into this once you have a strong knowledge of dbt + dbt Cloud. You have a couple of options to review the collection: > >* get a live version of the collection via [![Run in Postman](https://run.pstmn.io/button.svg)](https://god.gw.postman.com/run-collection/14183654-892ae7dc-e6a2-4165-8b57-1545dd69e4aa?action=collection%2Ffork&collection-url=entityId%3D14183654-892ae7dc-e6a2-4165-8b57-1545dd69e4aa%26entityType%3Dcollection%26workspaceId%3D048d09df-b9b5-4794-ad11-d0453ec3ecc4). ->* check out the [collection documentation](https://documenter.getpostman.com/view/14183654/UVsSNiXC#auth-info-7ad22661-8807-406b-aec0-34a46b671aac) to learn how to use it. +>* check out the [collection documentation](https://documenter.getpostman.com/view/14183654/UyxohieD#93c57cbf-3cb9-4c14-8c9a-278c19c5d6f1) to learn how to use it. The dbt Cloud API has well-documented endpoints for creating, triggering and managing dbt Cloud jobs. But there are other endpoints that aren’t well documented yet, and they’re extremely useful for end-users. These endpoints exposed by the API enable organizations not only to orchestrate jobs, but to manage their dbt Cloud accounts programmatically. This creates some really interesting capabilities for organizations to scale their dbt Cloud implementations. diff --git a/website/blog/2022-05-03-making-dbt-cloud-api-calls-using-dbt-cloud-cli.md b/website/blog/2022-05-03-making-dbt-cloud-api-calls-using-dbt-cloud-cli.md index d8337357518..47da8cbe2ff 100644 --- a/website/blog/2022-05-03-making-dbt-cloud-api-calls-using-dbt-cloud-cli.md +++ b/website/blog/2022-05-03-making-dbt-cloud-api-calls-using-dbt-cloud-cli.md @@ -12,9 +12,9 @@ date: 2022-05-03 is_featured: true --- -dbt Cloud is a hosted service that many organizations use for their dbt deployments. Among other things, it provides an interface for creating and managing deployment jobs. When triggered (e.g., cron schedule, API trigger), the jobs generate various artifacts that contain valuable metadata related to the dbt project and the run results. +dbt Cloud is a hosted service that many organizations use for their dbt deployments. Among other things, it provides an interface for creating and managing deployment jobs. When triggered (e.g., cron schedule, API trigger), the jobs generate various artifacts that contain valuable metadata related to the dbt project and the run results. -dbt Cloud provides a REST API for managing jobs, run artifacts and other dbt Cloud resources. Data/analytics engineers would often write custom scripts for issuing automated calls to the API using tools [cURL](https://curl.se/) or [Python Requests](https://docs.python-requests.org/en/latest/). In some cases, the engineers would go on and copy/rewrite them between projects that need to interact with the API. Now, they have a bunch of scripts on their hands that they need to maintain and develop further if business requirements change. 
If only there was a dedicated tool for interacting with the dbt Cloud API that abstracts away the complexities of the API calls behind an easy-to-use interface… Oh wait, there is: [the dbt-cloud-cli](https://github.com/data-mie/dbt-cloud-cli)!
+dbt Cloud provides a REST API for managing jobs, run artifacts and other dbt Cloud resources. Data/analytics engineers would often write custom scripts for issuing automated calls to the API using tools like [cURL](https://curl.se/) or [Python Requests](https://requests.readthedocs.io/en/latest/). In some cases, the engineers would go on and copy/rewrite them between projects that need to interact with the API. Now, they have a bunch of scripts on their hands that they need to maintain and develop further if business requirements change. If only there was a dedicated tool for interacting with the dbt Cloud API that abstracts away the complexities of the API calls behind an easy-to-use interface… Oh wait, there is: [the dbt-cloud-cli](https://github.com/data-mie/dbt-cloud-cli)!

@@ -53,7 +53,7 @@ dbt-cloud job run --job-id 43167

You probably agree that the latter example is definitely more elegant and easier to read. `dbt-cloud` handles the request boilerplate (e.g., api token in the header, endpoint URL) so that you don’t need to worry about authentication or remember which endpoint to use. Also, the CLI implements additional functionality (e.g., `--wait`) for some endpoints; for example, `dbt-cloud job run --wait` will issue the job trigger, wait until the job finishes, fails or is cancelled and then prints out the job status response.

-In addition to CLI commands that interact with a single dbt Cloud API endpoint there are composite helper commands that call one or more API endpoints and perform more complex operations. One example of composite commands are `dbt-cloud job export` and `dbt-cloud job import` where, under the hood, the export command performs a `dbt-cloud job get` and writes the job metadata to a JSON file and the import command reads job parameters from a JSON file and calls `dbt-cloud job create`. The export and import commands can be used in tandem to move dbt Cloud jobs between projects. Another example is the `dbt-cloud job delete-all` which fetches a list of all jobs using `dbt-cloud job list` and then iterates over the list prompting the user if they want to delete the job. For each job that the user agrees to delete a `dbt-cloud job delete` is performed.
+In addition to CLI commands that interact with a single dbt Cloud API endpoint there are composite helper commands that call one or more API endpoints and perform more complex operations. One example of composite commands is the pair `dbt-cloud job export` and `dbt-cloud job import` where, under the hood, the export command performs a `dbt-cloud job get` and writes the job metadata to a file and the import command reads job parameters from a JSON file and calls `dbt-cloud job create`. The export and import commands can be used in tandem to move dbt Cloud jobs between projects. Another example is `dbt-cloud job delete-all`, which fetches a list of all jobs using `dbt-cloud job list` and then iterates over the list prompting the user if they want to delete the job. For each job that the user agrees to delete, a `dbt-cloud job delete` is performed.

To install the CLI in your Python environment run `pip install dbt-cloud-cli` and you’re all set. You can use it locally in your development environment or e.g. in a GitHub actions workflow.
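As a quick illustration, here is a minimal sketch of local usage; the IDs are hypothetical, and it assumes (as the GitHub Actions example below does) that the CLI picks up your credentials from environment variables:

```bash
# Install the CLI into your Python environment
pip install dbt-cloud-cli

# Assumed environment variables (hypothetical values)
export DBT_CLOUD_API_TOKEN="<your-api-token>"
export DBT_CLOUD_ACCOUNT_ID=123456

# Trigger a job (hypothetical job ID) and block until it finishes, fails or is cancelled
dbt-cloud job run --job-id 43167 --wait
```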
@@ -78,7 +78,7 @@ Now we had exactly what we wanted and our CI workflow in GitHub actions looked s dbt-cloud job run --job-id $DBT_CLOUD_JOB_ID ``` -Fast forward a month or two and there was another client that needed something similar. I felt that this was an opportunity to open source the project not just to benefit me and my clients but also [the broader dbt community](https://www.getdbt.com/community/) (❤️). So, I moved the project to a public github repository with a goal of eventually covering all of the dbt Cloud API endpoints. +Fast forward a month or two and there was another client that needed something similar. I felt that this was an opportunity to open source the project not just to benefit me and my clients but also [the broader dbt community](https://www.getdbt.com/community/) (❤️). So, I moved the project to a public github repository with a goal of eventually covering all of the dbt Cloud API endpoints. While working with the initial 0.1.0 release that included only the `dbt-cloud job run` command I decided to have some fun and try how well pydantic (Python dataclasses on steroids!) and `click` worked together. I’m a big fan of `pydantic`, and I’ve used it in a wide variety of projects including machine learning models and automated testing software for a medical device. Even though Python has had built-in dataclasses since version 3.7, they fall short when it comes to data validation and general developer ergonomics (IMO) and that’s where `pydantic` comes in; among other things, `pydantic` implements a validator decorator that is used to define custom validations for model fields (e.g., CLI arguments). @@ -87,15 +87,15 @@ I refactored the `dbt-cloud-cli` code so that the CLI commands were now implemen ```python import click from dbt_cloud.command import DbtCloudJobGetCommand - + @click.group() def dbt_cloud(): pass - + @dbt_cloud.group() def job(): pass - + @job.command(help=DbtCloudJobGetCommand.get_description()) @DbtCloudJobGetCommand.click_options def get(**kwargs): @@ -121,7 +121,7 @@ Next, we use the `dbt-cloud get-artifact` command to download the `catalog.json` dbt-cloud run get-artifact --run-id $latest_run_id --path catalog.json -f catalog.json ``` -To explore the downloaded catalog file we’ll write a simple CLI application. The [catalog.json](https://schemas.getdbt.com/dbt/catalog/v1.json) has four top level properties: metadata, nodes, sources and errors. In this example we explore the nodes and sources only and leave the metadata and errors out. +To explore the downloaded catalog file we’ll write a simple CLI application. The [catalog.json](https://schemas.getdbt.com/dbt/catalog/v1.json) has four top level properties: metadata, nodes, sources and errors. In this example we explore the nodes and sources only and leave the metadata and errors out. First, we need a `Catalog` abstraction that reflects the catalog JSON schema: @@ -197,7 +197,7 @@ class Catalog(BaseModel): errors: Optional[Dict] ``` -The four abstractions (`Stats`,`Column`, `Node `and `Catalog`) all inherit [the pydantic BaseModel](https://pydantic-docs.helpmanual.io/usage/models/) which implements various methods for parsing files and other python objects into model instances. We’ll leave the parsing to pydantic (i.e., `BaseModel.parse_file` classmethod) so that we can focus solely on the app logic. 
+The four abstractions (`Stats`, `Column`, `Node`, and `Catalog`) all inherit [the pydantic BaseModel](https://pydantic-docs.helpmanual.io/usage/models/) which implements various methods for parsing files and other python objects into model instances. We’ll leave the parsing to pydantic (i.e., `BaseModel.parse_file` classmethod) so that we can focus solely on the app logic.

The `CatalogExploreCommand` abstraction implements the CLI app which is then wrapped in a `click.command` that implements the CLI entry point. The `CatalogExploreCommand` class inherits `ClickBaseModel` that implements a `click_options` classmethod which we’ll use to decorate the entry point. This method is where the pydantic to click translation magic happens (i.e., pydantic model fields are translated [into click options](https://click.palletsprojects.com/en/8.0.x/options/)). Note that the app [uses inquirer](https://github.com/magmax/python-inquirer) in addition to `click` to create interactive “select option from a list” CLI prompts.

@@ -317,7 +317,7 @@ dbt-cloud demo data-catalog --file catalog.json

## Parting thoughts

-To summarize, the `dbt-cloud-cli`I implements an easy-to-use command line interface for the dbt Cloud API which abstracts away the complexities of the API calls. The CLI has interfaces to many of the API endpoints and covering all of the endpoints is on the project’s roadmap. For a list of all the covered API endpoints and implemented CLI commands, see https://github.com/data-mie/dbt-cloud-cli.
+To summarize, the `dbt-cloud-cli` implements an easy-to-use command line interface for the dbt Cloud API which abstracts away the complexities of the API calls. The CLI has interfaces to many of the API endpoints and covering all of the endpoints is on the project’s roadmap. For a list of all the covered API endpoints and implemented CLI commands, see https://github.com/data-mie/dbt-cloud-cli.

In addition to commands that interact with a single dbt Cloud API endpoint there are composite helper commands that call one or more API endpoints and perform more complex operations (e.g., `dbt-cloud job export` and `dbt-cloud job import`).

diff --git a/website/blog/2022-05-17-stakeholder-friendly-model-names.md b/website/blog/2022-05-17-stakeholder-friendly-model-names.md
index 7d8186c669c..0e0ccad5c96 100644
--- a/website/blog/2022-05-17-stakeholder-friendly-model-names.md
+++ b/website/blog/2022-05-17-stakeholder-friendly-model-names.md
@@ -1,5 +1,5 @@
---
-title: "Stakeholder-friendly model names: model naming conventions that give context"
+title: "Stakeholder-friendly model names: Model naming conventions that give context"

description: "Your model names are usually made by engineers, for engineers. While that's great for maintainability, your end users won't have the same context into those model names as you do."
slug: stakeholder-friendly-model-names

@@ -40,14 +40,14 @@ Analysts are interfacing with data from the outside in. They are in meetings wit

- Precomputed views/tables in a BI tool
- Read-only access to the dbt Cloud IDE docs
-- Full list of tables and views in their data warehouse
+- Full list of tables and views in their data warehouse

#### Precomputed views/tables in a BI tool

Here we have drag and drop functionality and a skin over top of the underlying `database.schema.table` where the database object is stored. The BI Tool has been configured by an Analytics Engineer or Data Engineer to automatically join datasets as you click/drag/drop fields into your exploration.
**How model names can make this painful:**

-The end users might not even know what tables the data refers to, as potentially everything is joined by the system and they don’t need to write their own queries. If model names are chosen poorly, there is a good chance that the BI layer on top of the database tables has been renamed to something more useful for the analysts. This adds an extra step of mental complexity in tracing the lineage from data model to BI.
+The end users might not even know what tables the data refers to, as potentially everything is joined by the system and they don’t need to write their own queries. If model names are chosen poorly, there is a good chance that the BI layer on top of the database tables has been renamed to something more useful for the analysts. This adds an extra step of mental complexity in tracing the lineage from data model to BI.

#### Read-only access to the dbt Cloud IDE docs

If Analysts want more context via documentation, they may traverse back to the dbt layer and check out the data models in either the context of the Project or Database. In the Project view, they will see the data models in the folder hierarchy present in your project’s repository. In the Database view you will see the output of the data models as present in your database, i.e. `database / schema / object`.

@@ -157,7 +157,7 @@ These 3 parts go from least granular (general) to most granular (specific) so yo

### Coming up...

-In this part of the series, we talked about why the model name is the center of understanding for the purpose and content within a model. In the in the upcoming "How We Structure Our dbt Projects" guide, you can explore how to use this naming pattern with more specific examples in different parts of your dbt DAG that cover regular use cases:
+In this part of the series, we talked about why the model name is the center of understanding for the purpose and content within a model. In the upcoming ["How We Structure Our dbt Projects"](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview) guide, you can explore how to use this naming pattern with more specific examples in different parts of your dbt DAG that cover regular use cases:

- How would you name a model that is filtered on some columns
- Do we recommend naming snapshots in a specific way

diff --git a/website/blog/2022-05-19-redshift-configurations-dbt-model-optimizations.md b/website/blog/2022-05-19-redshift-configurations-dbt-model-optimizations.md
index 239fa7148c6..c01194360f1 100644
--- a/website/blog/2022-05-19-redshift-configurations-dbt-model-optimizations.md
+++ b/website/blog/2022-05-19-redshift-configurations-dbt-model-optimizations.md
@@ -234,13 +234,13 @@ I won’t get into our modeling methodology at dbt Labs in this article, but the

### Staggered joins

-![Staggered-Joins.png](/img/blog/2022-05-19-redshift-configurations-dbt-model-optimizations/Staggered-Joins.png)
+![Staggered-Joins.png](/img/blog/2022-05-19-redshift-configurations-dbt-model-optimizations/Staggered-Joins.jpg)

In this method, you piece out your joins based on the main table they’re joining to. For example, if you had five tables that were all joined using `person_id`, then you would stage your data (doing your clean up too, of course), distribute those by using `dist='person_id'`, and then marry them up in some table downstream. Now with that new table, you can choose the next distribution key you’ll need for the next process that will happen.
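As a rough sketch of what that looks like in dbt (the model, source, and column names here are illustrative), each staging model sets the Redshift `dist` config to the shared join key:

```sql
-- models/staging/stg_person_orders.sql (hypothetical model name)
{{
    config(
        materialized='table',
        dist='person_id'  -- co-locate rows on the key used by the downstream join
    )
}}

select
    person_id,
    order_id,
    order_total
from {{ source('shop', 'orders') }}
```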
In our example above, the next step is joining to the `anonymous_visitor_profiles` table which is distributed by `mask_id`, so the results of our join should also distribute by `mask_id`.

### Resolve to a single key

-![Resolve-to-single-key](/img/blog/2022-05-19-redshift-configurations-dbt-model-optimizations/Resolve-to-single-key.png)
+![Resolve-to-single-key](/img/blog/2022-05-19-redshift-configurations-dbt-model-optimizations/Resolve-to-single-key.jpg)

This method takes some time to think about, and it may not make sense to do it depending on what you need. This is definitely a balance between coherence, usability, and performance.

diff --git a/website/blog/2022-06-30-coalesce-sql.md b/website/blog/2022-06-30-coalesce-sql.md
new file mode 100644
index 00000000000..dc85daf8a53
--- /dev/null
+++ b/website/blog/2022-06-30-coalesce-sql.md
@@ -0,0 +1,84 @@
+---
+title: "COALESCE SQL function: Why we love it"
+description: "The COALESCE SQL function is an incredibly useful function that allows you to fill in unhelpful blank values that may show up in your data."
+slug: coalesce-sql-love-letter
+
+authors: [kira_furuichi]
+
+tags: [SQL Magic]
+hide_table_of_contents: false
+
+date: 2022-05-08
+is_featured: false
+---
+
+It’s inevitable in the field of analytics engineering: you’re going to encounter moments when there are mysterious or unhelpful blank values in your data. Null values surely have their time and place, but when you need those null values filled with more meaningful data, COALESCE comes to the rescue.
+
+COALESCE is an incredibly useful function that allows you to fill in unhelpful blank values that may show up in your data. In the words of analytics engineer [Lauren Benezra](https://docs.getdbt.com/author/lauren_benezra), you will probably almost never see a data model that doesn’t use COALESCE somewhere.
+
+
+> **What is a SQL Function?**
+> At a high level, a function takes an input (or multiple inputs) and returns a manipulation of those inputs. Some common SQL functions are [EXTRACT](https://docs.getdbt.com/blog/extract-sql-love-letter/), [LOWER](https://docs.getdbt.com/blog/lower-sql-love-letter/), and [DATEDIFF](https://docs.getdbt.com/blog/datediff-sql-love-letter/). For example, the LOWER function takes a string value and returns an all lower-case version of that input string.
+
+## How to use the COALESCE function
+
+In formal terms, using the COALESCE function on a series of values will return the first non-null value.
+
+The general syntax for using the COALESCE function looks like the following:
+
+```sql
+coalesce(<value_1>, <value_2>, ...)
+```
+
+You can have as many input values/columns to the COALESCE function as you like, but remember: order is important here since the first non-null value is the one that is returned. In practice, you’ll likely only ever use the COALESCE function with two inputs: a column and the value you want to fill null values of that column with.
+
+> **See it in action:**
+> The COALESCE function is used in the [surrogate_key](https://docs.getdbt.com/blog/sql-surrogate-keys) macro to replace null column values.
+
+### Data warehouse support for the COALESCE function
+
+Most, if not all, modern data warehouses support the COALESCE function; [Google BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/conditional_expressions#coalesce), [Amazon Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_COALESCE.html), [Snowflake](https://docs.snowflake.com/en/sql-reference/functions/coalesce.html), [Postgres](https://www.postgresqltutorial.com/postgresql-tutorial/postgresql-coalesce/), and [Databricks](https://docs.databricks.com/sql/language-manual/functions/coalesce.html) all support the COALESCE function. In addition, the syntax to use COALESCE is the same across all of them.
+
+## COALESCE SQL function example
+
+Let’s look at an actual example using COALESCE. Below, we have an `orders` table with three columns: an `order_id`, `order_date`, and `order_status`.
+
+| **order_id** | **order_date** | **order_status** |
+| ------------ | -------------- | ---------------- |
+| 12389 | 2022-01-02 | |
+| 34553 | 2020-04-23 | returned |
+| 78411 | 2022-06-06 | |
+
+If you do a little exploration on this table, you would see that there are only two unique values for `order_status`: NULL and `returned`. As we said before, null values have their time and place, but if you first look at this table, the null value for an order could mean many things–has the order been processed? Was the order successful?
+
+In this `orders` table, you can assume here that any NULL `order_status` value means that the order was not returned. To make this more clear to anyone who looks at this table, you can utilize a COALESCE function to return a newer, more readable `order_status`.
+
+```sql
+select
+  order_id,
+  order_date,
+  coalesce(order_status, 'not_returned') as order_status
+from {{ ref('orders') }}
+```
+
+Running this query would return the following:
+
+| **order_id** | **order_date** | **order_status** |
+| ------------ | -------------- | ---------------- |
+| 12389 | 2022-01-02 | not_returned |
+| 34553 | 2020-04-23 | returned |
+| 78411 | 2022-06-06 | not_returned |
+
+Now, there are no null values in the `order_status` column since any null value was replaced by a `not_returned` string. Order 34553’s `order_status` remained unchanged because its original `order_status` was the first non-null value passed in the COALESCE function. By providing more context into what these null values mean, anyone who looks at this table can quickly understand the order status for a specific order.
+
+> **To replace or not to replace:**
+> COALESCE has a straightforward use case—fill missing values with values you specify—but you also want to ensure you’re not changing non-empty values when using it. This is where the order of the input values to the COALESCE function is important: from left to right, the first non-null value is the one that’s returned.
+
+## Why we love it
+
+We checked our data team’s dbt project, and we used the COALESCE function over 100 times. We like the COALESCE function so much we named the [annual data conference on analytics engineering](https://coalesce.getdbt.com/) after it.
+
+At its core, the COALESCE function is an efficient way to fill in missing column values with values you specify. You can additionally use COALESCE across most, if not all, modern data warehouses and there’s [no tricky cross-database syntax like there is for DATEADD](https://docs.getdbt.com/blog/sql-dateadd).
+
+Thank you COALESCE for always finding our moments of emptiness, and filling them with valuable stuff.

diff --git a/website/blog/2022-06-30-extract-sql-function.md b/website/blog/2022-06-30-extract-sql-function.md
new file mode 100644
index 00000000000..b81a7254a76
--- /dev/null
+++ b/website/blog/2022-06-30-extract-sql-function.md
@@ -0,0 +1,85 @@
+---
+title: "EXTRACT SQL function: Why we love it"
+description: "In this post, we’re going to give a deep-dive into the EXTRACT function, how it works, and why we use it. The EXTRACT function allows you to extract a specified date part from a date/time."
+slug: extract-sql-love-letter
+
+authors: [kira_furuichi]
+
+tags: [SQL Magic]
+hide_table_of_contents: false
+
+date: 2022-05-15
+is_featured: false
+---
+There are so many different date functions in SQL—you have [DATEDIFF](https://docs.getdbt.com/blog/datediff-sql-love-letter/), [DATEADD](https://docs.getdbt.com/blog/sql-dateadd), DATE_PART, and [DATE_TRUNC](https://docs.getdbt.com/date-trunc-sql) to name a few. They all have their different use cases and understanding how and when they should be used is a SQL fundamental to get down. Are any of those as easy to use as the EXTRACT function? Well, that debate is for another time…
+
+In this post, we’re going to give a deep dive into the EXTRACT function, how it works, and why we use it.
+
+
+The EXTRACT function allows you to extract a specified date part from a date/time. For example, if you were to extract the month from the date February 14, 2022, it would return 2 since February is the second month in the year.
+
+> **What is a SQL function?**
+> At a high level, a function takes an input (or multiple inputs) and returns a manipulation of those inputs. Some common SQL functions are [COALESCE](https://docs.getdbt.com/blog/coalesce-sql-love-letter/), [LOWER](https://docs.getdbt.com/blog/lower-sql-love-letter/) and [DATEDIFF](https://docs.getdbt.com/blog/datediff-sql-love-letter/). For example, the COALESCE function takes a group of values and returns the first non-null value from that group.
+
+## How to use the EXTRACT function
+
+One of our favorite things about the EXTRACT function is how readable it is. Sometimes you may encounter SQL functions and not immediately understand what the arguments are and what the expected output should be. (We’re looking at you, SPLIT_PART.) The EXTRACT function isn’t like that.
+
+To use the EXTRACT function, you’ll simply specify the date part you want extracted out and the field you want to extract from. You can extract many different date parts, but you’ll most commonly see year, month, week of year, or quarter extracted from a date.
+
+```sql
+extract(<date_part> from <date/time field>)
+```
+
+Depending on the data warehouse you use, the value returned from an EXTRACT function is often a numeric value or the same date type as the input `<date/time field>`. Read the [documentation for your data warehouse](#data-warehouse-support-for-extract-function) to better understand EXTRACT outputs.
+
+> **Note:**
+> You may additionally see a comma used in place of the ‘from’ in the EXTRACT function, like `extract(<date_part>, <date/time field>)`. We feel that using that ‘from’ in the function makes it a little more readable.
+
+### The DATE_PART function
+
+You may also see the DATE_PART function used in place of the EXTRACT function. Both DATE_PART and EXTRACT perform the same functionality; it’s just a matter of preference on which one you want to use.
+
+> **Postgres & DATE_PART:**
+> This is overly pedantic and you’ll likely never encounter an issue with DATE_PART and EXTRACT evaluating to different values in a way that truly matters, but it’s worth noting. Postgres’ DATE_PART and EXTRACT functions would previously evaluate to the same output. However, with Postgres 14, the [EXTRACT function now returns a numeric type instead of an 8-byte float.](https://stackoverflow.com/questions/38442340/difference-between-extractyear-from-timestamp-function-and-date-partyear-t)
+
+### Data warehouse support for the EXTRACT function
+
+[Google BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/datetime_functions#extract), [Amazon Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_EXTRACT_function.html), [Snowflake](https://docs.snowflake.com/en/sql-reference/functions/extract.html), [Postgres](https://www.postgresqltutorial.com/postgresql-date-functions/postgresql-extract/), and [Databricks](https://docs.databricks.com/sql/language-manual/functions/extract.html) all support the EXTRACT function. In addition, the syntax to use EXTRACT is the same across all of them.
+
+## EXTRACT function example
+
+Let’s take this to an actual example! We’re going to use the [jaffle shop](https://github.com/dbt-labs/jaffle_shop/blob/main/models/orders.sql), a simple dataset and dbt project, to help us. The jaffle shop’s `orders` table has some fields around an order’s status, order date, and order amount.
+
+You can extract different time-based values (weeks, months, years, etc.) from the `order_date` in the `orders` model using the following code:
+
+```sql
+select
+  order_id,
+  order_date,
+  extract(week from order_date) as order_week,
+  extract(month from order_date) as order_month,
+  extract(year from order_date) as order_year
+from {{ ref('orders') }}
+```
+
+After running this query, your results would look a little something like this:
+
+| **order_id** | **order_date** | **order_week** | **order_month** | **order_year** |
+| ------------ | -------------- | -------------- | --------------- | -------------- |
+| 1 | 2018-01-01 | 1 | 1 | 2018 |
+| 9 | 2018-01-12 | 2 | 1 | 2018 |
+| 72 | 2018-03-23 | 12 | 3 | 2018 |
+
+As you can see, this query extracted the week of year, month of year, and year from the `order_date`.
+
+## Why we love it
+
+We’re going to be honest: EXTRACT isn’t the most widely used SQL function in our dbt project. However, EXTRACT has its time and place:
+
+* Fiscal calendars: If your business uses fiscal years, or calendars that differ from the normal 12-month cycle, EXTRACT functions can help create alignment between fiscal calendars and normal calendars
+* Ad hoc analysis: EXTRACT functions are useful in ad hoc analyses and queries when you need to look at values grouped by date periods or for period comparisons
+
+Extract is a consistent, helpful, and straightforward function–what more could we ask for from a ~~friend~~ function?
+
diff --git a/website/blog/2022-06-30-lower-sql-function.md b/website/blog/2022-06-30-lower-sql-function.md
new file mode 100644
index 00000000000..353b11376b0
--- /dev/null
+++ b/website/blog/2022-06-30-lower-sql-function.md
@@ -0,0 +1,83 @@
+---
+title: "LOWER SQL function: Why we love it"
+description: "The LOWER SQL Function allows you to return a string value as an all lowercase string. It’s an effective way to create consistent capitalization for string values across your data."
+slug: lower-sql-love-letter
+
+authors: [kira_furuichi]
+
+tags: [SQL Magic]
+hide_table_of_contents: false
+
+date: 2022-05-11
+is_featured: false
+---
+
+We’ve all been there:
+
+* In a user signup form, user A typed in their name as `Kira Furuichi`, user B typed it in as `john blust`, and user C wrote `DAvid KrevitT` (what’s up with that, David??)
+* Your backend application engineers are adamant customer emails are in all caps
+* All of your event tracking names are lowercase
+
+In the real world of human imperfection, opinions, and error, string values are likely to take inconsistent capitalization across different data sources (or even within the same data source). There’s always a little lack of rhyme or reason for why some values are passed as upper or lowercase, and it’s not worth the headache to unpack that.
+
+So how do you create uniformity for string values that you collect across all your data sources? The LOWER function!
+
+
+Using the LOWER function on a string value will return the input as an all lowercase string. It’s an effective way to create consistent capitalization for string values across your data.
+
+> **What is a SQL function?**
+> At a high level, a function takes an input (or multiple inputs) and returns a manipulation of those inputs. Some common SQL functions are [COALESCE](https://docs.getdbt.com/blog/coalesce-sql-love-letter/), [EXTRACT](https://docs.getdbt.com/blog/extract-sql-love-letter), and [DATEDIFF](https://docs.getdbt.com/blog/datediff-sql-love-letter/). For example, the COALESCE function takes a group of values and returns the first non-null value from that group.
+
+## How to use the LOWER function
+
+The syntax for using the LOWER function looks like the following:
+
+```sql
+lower('<string>')
+```
+
+Executing this command in a SELECT statement will return the lowercase version of the input string. You may additionally use the LOWER function in WHERE clauses and joins.
+
+### Data warehouse support for the LOWER function
+
+[Google BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#lower), [Amazon Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_LOWER.html), [Snowflake](https://docs.snowflake.com/en/sql-reference/functions/lower.html), [Postgres](https://www.postgresqltutorial.com/postgresql-string-functions/postgresql-letter-case-functions/), and [Databricks](https://docs.databricks.com/sql/language-manual/functions/lower.html) all support the LOWER function. In addition, the syntax to use LOWER is the same across all of them.
+
+## LOWER SQL function example
+
+Let’s take this to an actual example! Below, you’ll see the first three rows from the `customers` table in the [jaffle_shop](https://github.com/dbt-labs/jaffle_shop), a simple dataset and dbt project, that has three columns: `customer_id`, `first_name`, and `last_name`.
+
+| **customer_id** | **first_name** | **last_name** |
+| --------------- | -------------- | ------------- |
+| 1 | Michael | P. |
+| 2 | Shawn | M. |
+| 3 | Kathleen | P. |
+
+You can lower the first name and last name of the `customers` model using the following code:
+
+```sql
+select
+  customer_id,
+  lower(first_name) as first_name,
+  lower(last_name) as last_name
+from {{ ref('customers') }}
+```
+
+After running this query, the `customers` table will look a little something like this:
+
+| **customer_id** | **first_name** | **last_name** |
+| --------------- | -------------- | ------------- |
+| 1 | michael | p. |
+| 2 | shawn | m. |
+| 3 | kathleen | p. |
+
+Now, all characters in the `first_name` and `last_name` columns are lowercase.
+
+> **Where do you lower?**
+> Changing all string columns to lowercase to create uniformity across data sources typically happens in our dbt project’s [staging models](https://docs.getdbt.com/guides/best-practices/how-we-structure/2-staging). There are a few reasons for that: data cleanup and standardization, such as aliasing, casting, and lowercasing, should ideally happen in staging models to create downstream uniformity. It’s also more performant: downstream models that join on string values can join on consistently cased strings directly, instead of having to lowercase them at join time.
+
+## Why we love it
+
+Let’s go back to our chaotic trio of users A, B, and C who all used different capitalizations to type in their names. If you don’t create consistent capitalization for string values, how would a business user know exactly what to filter for in their BI tool? A business user could filter a name field on “John Blust” since that’s what they would expect it to look like, only to get zero results back. By creating a consistent capitalization format (upper or lowercase) for all string values in your data models, you set clear expectations for business users in your BI tool.
+
+There will most likely never be 100% consistency in your data models, but doing all that you can to mitigate that chaos will hopefully make your life and the lives of your business users a little easier. Use the LOWER function to create a consistent casing for all strings in your data sources.
diff --git a/website/blog/2022-07-05-date-trunc-sql-love-letter.md b/website/blog/2022-07-05-date-trunc-sql-love-letter.md
new file mode 100644
index 00000000000..99f658590a6
--- /dev/null
+++ b/website/blog/2022-07-05-date-trunc-sql-love-letter.md
@@ -0,0 +1,100 @@
+---
+title: "DATE_TRUNC SQL function: Why we love it"
+description: "The DATE_TRUNC function will truncate a date or time to the first instance for a given date part maintaining a date format. Wordy, wordy, wordy! What does this really mean?"
+slug: date-trunc-sql
+
+authors: [kira_furuichi]
+
+tags: [sql magic]
+hide_table_of_contents: true
+
+date: 2022-07-13
+is_featured: false
+---
+In general, data people prefer the more granular over the less granular. [Timestamps > dates](https://docs.getdbt.com/blog/when-backend-devs-spark-joy#signs-the-data-is-sparking-joy), daily data > weekly data, etc.; having data at a more granular level always allows you to zoom in. However, you’re likely looking at your data at a somewhat zoomed-out level—weekly, monthly, or even yearly. To do that, you’re going to need a handy dandy function that helps you round out date or time fields.
+
+The DATE_TRUNC function will truncate a date or time to the first instance of a given date part. Wordy, wordy, wordy! What does this really mean? If you were to truncate `2021-12-13` out to its month, it would return `2021-12-01` (the first day of the month).
+
+Using the DATE_TRUNC function, you can truncate to the weeks, months, years, or other date parts for a date or time field. This can make date/time fields easier to read, as well as help perform cleaner time-based analyses.
+
+
+
+> **What is a SQL function?**
+> At a high level, a function takes an input (or multiple inputs) and returns a manipulation of those inputs.
Some common SQL functions are [COALESCE](https://getdbt.com/sql-foundations/coalesce-sql-love-letter/), [LOWER](https://getdbt.com/sql-foundations/lower-sql-love-letter/), and [EXTRACT](https://getdbt.com/sql-foundations/extract-sql-love-letter/). For example, the COALESCE function takes a group of values and returns the first non-null value from that group.
+
+Overall, it’s a great function to use to help you aggregate your data into specific date parts while keeping a date format. However, the DATE_TRUNC function isn’t your Swiss Army knife–it’s not able to do magic or solve all of your problems (we’re looking at you [star](https://getdbt.com/sql-foundations/star-sql-love-letter/)). Instead, DATE_TRUNC is your standard kitchen knife—it’s simple and efficient, and you almost never start cooking (data modeling) without it.
+
+## How to use the DATE_TRUNC function
+
+For the DATE_TRUNC function, there are two arguments you must pass in:
+
+* The date part: This is the days/months/weeks/years (level) you want your field to be truncated out to
+* The date/time you want to be truncated
+
+The DATE_TRUNC function can be used in SELECT statements and WHERE clauses.
+
+Most, if not all, modern cloud data warehouses support some type of the DATE_TRUNC function. There may be some minor differences between the argument order for DATE_TRUNC across data warehouses, but the functionality very much remains the same.
+
+Below, we’ll outline some of the slight differences in the implementation between some of the data warehouses.
+
+### The DATE_TRUNC function in Snowflake and Databricks
+
+In [Snowflake](https://docs.snowflake.com/en/sql-reference/functions/date_trunc.html) and [Databricks](https://docs.databricks.com/sql/language-manual/functions/date_trunc.html), you can use the DATE_TRUNC function using the following syntax:
+
+```sql
+date_trunc(<date_part>, <date/time field>)
+```
+
+In these platforms, the `<date_part>` is passed in as the first argument in the DATE_TRUNC function.
+
+### The DATE_TRUNC function in Google BigQuery and Amazon Redshift
+
+In [Google BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/date_functions#date_trunc) and [Amazon Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_DATE_TRUNC.html), the `<date/time field>` is passed in as the first argument and the `<date_part>` is the second argument.
+
+```sql
+date_trunc(<date/time field>, <date_part>)
+```
+
+> **A note on BigQuery:**
+> BigQuery’s DATE_TRUNC function supports the truncation of date types, whereas Snowflake, Redshift, and Databricks’ versions accept either a date or timestamp data type. BigQuery also supports DATETIME_TRUNC and TIMESTAMP_TRUNC functions to support truncation of more granular date/time types.
+
+## A dbt macro to remember
+
+Why Snowflake, Amazon Redshift, Databricks, and Google BigQuery decided to use different implementations of essentially the same function is beyond us, and it’s not worth the headache trying to figure that out. Instead of remembering if the `<date_part>` or the `<date/time field>` comes first (which, let’s be honest, we can literally never remember), you can rely on a dbt Core macro to help you get away from finicky syntax.
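+
+To make the difference concrete, here’s the same truncation written for two warehouses (a sketch that assumes an `orders` table with an `order_date` column). In Snowflake, the date part comes first:
+
+```sql
+select date_trunc('month', order_date) as order_month
+from orders
+```
+
+In BigQuery, the arguments are flipped and the date part is unquoted:
+
+```sql
+select date_trunc(order_date, month) as order_month
+from orders
+```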
+
+With dbt v1.2, [adapters](https://docs.getdbt.com/docs/supported-data-platforms) now support [cross-database macros](https://docs.getdbt.com/reference/dbt-jinja-functions/cross-database-macros) to help you write certain functions, like [DATE_TRUNC](https://docs.getdbt.com/reference/dbt-jinja-functions/cross-database-macros#date_trunc) and [DATEDIFF](https://docs.getdbt.com/reference/dbt-jinja-functions/cross-database-macros#datediff), without having to memorize sticky function syntax.
+
+> **Note:**
+> Previously, [dbt_utils](https://github.com/dbt-labs/dbt-utils), a package of macros and tests that data folks can use to help write more DRY code in their dbt project, powered cross-database macros. Now, cross-database macros are available **regardless of whether dbt utils is installed.**
+
+Using the [jaffle shop](https://github.com/dbt-labs/jaffle_shop/blob/main/models/orders.sql), a simple dataset and dbt project, you can truncate the `order_date` from the `orders` table using the dbt [DATE_TRUNC macro](https://docs.getdbt.com/reference/dbt-jinja-functions/cross-database-macros#date_trunc):
+
+```sql
+select
+    order_id,
+    order_date,
+    {{ date_trunc("week", "order_date") }} as order_week,
+    {{ date_trunc("month", "order_date") }} as order_month,
+    {{ date_trunc("year", "order_date") }} as order_year
+from {{ ref('orders') }}
+```
+
+Running the above would produce the following sample results:
+
+| order_id | order_date | order_week | order_month | order_year |
+|:---:|:---:|:---:|:---:|:---:|
+| 1 | 2018-01-01 | 2018-01-01 | 2018-01-01 | 2018-01-01 |
+| 70 | 2018-03-12 | 2018-03-12 | 2018-03-01 | 2018-01-01 |
+| 91 | 2018-03-31 | 2018-03-26 | 2018-03-01 | 2018-01-01 |
+
+The `order_week`, `order_month`, and `order_year` fields are the truncated values from the `order_date` field.
+
+**A mild word of warning:** If you’re using the DATE_TRUNC function to modify fields or create new ones, it’s important that you use strong naming conventions for these fields. Since the output from the DATE_TRUNC function looks like a normal date, other data folks or business users may not understand that it’s an altered field and may mistake it for the actual date something happened.
+
+## Why we love it
+
+The DATE_TRUNC function is a great way to do data analysis and data modeling that needs to happen at a zoomed-out date part. It’s often used for time-based work, such as customer retention modeling or analysis. The DATE_TRUNC function also allows you to keep the date format of a field, which allows for the most ease and compatibility in most BI (business intelligence) tools.
+
+TL;DR – DATE_TRUNC is a handy, widely-used SQL function—and dbt has made it even simpler to start using!
+
+*This post is a part of the SQL love letters—a series on the SQL functions the dbt Labs data team members use and love. You can find [the entire collection here](https://getdbt.com/sql-foundations/top-sql-functions).*
diff --git a/website/blog/2022-07-05-datediff-sql-love-letter.md b/website/blog/2022-07-05-datediff-sql-love-letter.md
new file mode 100644
index 00000000000..5661dde7a9e
--- /dev/null
+++ b/website/blog/2022-07-05-datediff-sql-love-letter.md
@@ -0,0 +1,93 @@
+---
+title: "DATEDIFF SQL function: Why we love it"
+description: "The DATEDIFF function will return the difference in specified units (ex. days, weeks, years) between a start date/time and an end date/time. It’s a simple and widely used function that you’ll find yourself using more often than you expect."
+slug: datediff-sql-love-letter
+
+authors: [kira_furuichi]
+
+tags: [sql magic]
+hide_table_of_contents: false
+
+date: 2022-07-13
+is_featured: false
+---
+
+*“How long has it been since this customer last ordered with us?”*
+
+*“What is the average number of days to conversion?”*
+
+Business users will have these questions, data people will have to answer these questions, and the only way to solve them is by calculating the time between two different dates. Luckily, there’s a handy DATEDIFF function that can do that for you.
+
+The DATEDIFF function will return the difference in specified units (ex. days, weeks, years) between a start date/time and an end date/time. It’s a simple and widely used function that you’ll find yourself using more often than you expect.
+
+
+
+> **What is a SQL function?**
+> At a high level, a function takes an input (or multiple inputs) and returns a manipulation of those inputs. Some common SQL functions are [COALESCE](https://getdbt.com/sql-foundations/coalesce-sql-love-letter/), [LOWER](https://getdbt.com/sql-foundations/lower-sql-love-letter/), and [EXTRACT](https://getdbt.com/sql-foundations/extract-sql-love-letter/). For example, the COALESCE function takes a group of values and returns the first non-null value from that group.
+
+DATEDIFF is a little bit like your favorite pair of socks; you’ll usually find the first one easily and feel like the day is going to be great. But for some reason, the matching sock requires a little digging in the drawer. DATEDIFF is this pair of socks—you’ll inevitably find yourself Googling the syntax almost every time you use it, but you can’t go through your day without using it.
+
+This post will go over how to use the DATEDIFF function across different data warehouses and how to write more standardized DATEDIFF functions using a dbt macro (or successfully find your socks as a pair in one go).
+
+## How to use the DATEDIFF function
+
+For the DATEDIFF function, there are three elements, or arguments, passed in:
+
+* The date part: This is the days/months/weeks/years (unit) of the difference calculated
+* The first (start) date/time
+* The second (end) date/time
+
+The DATEDIFF function can be used in SELECT statements and WHERE clauses.
+
+Most, if not all, modern cloud data warehouses support some type of the DATEDIFF function. There may be some minor differences between the argument order and function name for DATEDIFF across data warehouses, but the functionality very much remains the same.
+
+Below, we’ll outline some of the slight differences in the implementation between some data warehouses.
+
+### DATEDIFF in Snowflake, Amazon Redshift, and Databricks
+
+The syntax for using the DATEDIFF function in [Snowflake](https://docs.snowflake.com/en/sql-reference/functions/datediff.html), [Amazon Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_DATEDIFF_function.html), and [Databricks](https://docs.databricks.com/sql/language-manual/functions/datediff3.html) looks like the following:
+
+```sql
+datediff(<date_part>, <start_date/time>, <end_date/time>)
+```
+
+> **A note on Databricks:**
+> Databricks additionally supports a separate [DATEDIFF function](https://docs.databricks.com/sql/language-manual/functions/datediff.html) that takes only two arguments: a start date and an end date. The function will always return the difference between two dates in days.
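+
+For example, to calculate how many days have passed since each order was placed in Snowflake (a sketch that assumes an `orders` table with an `order_date` column; `current_date` stands in for any end date):
+
+```sql
+select
+    order_id,
+    order_date,
+    datediff('day', order_date, current_date) as days_since_order
+from orders
+```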
+
+### DATEDIFF in Google BigQuery
+
+The [Google BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/datetime_functions#datetime_diff) implementation of the DATEDIFF function works a little differently. Three minor differences in the implementation here:
+
+* Unlike in Snowflake, Amazon Redshift, and Databricks, where the `<date_part>` is passed as the first argument, the `<date_part>` is passed in as the last argument in Google BigQuery.
+* Google BigQuery also calls the function DATETIME_DIFF, with an additional underscore separating the function name. This is on par with [Google BigQuery’s preference to have underscores in function names](https://cloud.google.com/bigquery/docs/reference/standard-sql/date_functions).
+* The DATETIME_DIFF arguments are datetimes, not dates; Snowflake, Redshift, and Databricks’ DATEDIFF functions support multiple date types, such as dates and timestamps. BigQuery also supports a separate [DATE_DIFF function](https://cloud.google.com/bigquery/docs/reference/standard-sql/date_functions#date_diff) that will return the difference between two `date` types, unlike the DATETIME_DIFF that only supports the `datetime` type.
+
+## A hero in the shadows: The DATEDIFF dbt macro!
+
+You may be able to memorize the syntax for the DATEDIFF function for the primary data warehouse you use. What happens when you switch to a different one for a new job or a new data stack? Remembering if there’s an underscore in the function name or which argument the `<date_part>` is passed in as is… no fun and leads to the inevitable, countless “datediff in bigquery” Google searches.
+
+Luckily, [dbt-core](https://github.com/dbt-labs/dbt-core) has your back! dbt Core is the open source dbt product that helps data folks write their data transformations following software engineering best practices.
+
+With dbt v1.2, [adapters](https://docs.getdbt.com/docs/supported-data-platforms) now support [cross-database macros](https://docs.getdbt.com/reference/dbt-jinja-functions/cross-database-macros) to help you write certain functions, like [DATE_TRUNC](https://docs.getdbt.com/reference/dbt-jinja-functions/cross-database-macros#date_trunc) and [DATEDIFF](https://docs.getdbt.com/reference/dbt-jinja-functions/cross-database-macros#datediff), without having to memorize sticky function syntax.
+
+> **Note:**
+> Previously, [dbt_utils](https://github.com/dbt-labs/dbt-utils), a package of macros and tests that data folks can use to help write more DRY code in their dbt project, powered cross-database macros. Now, cross-database macros are available **regardless of whether dbt utils is installed.**
+
+Using the [DATEDIFF macro](https://docs.getdbt.com/reference/dbt-jinja-functions/cross-database-macros#datediff), you can calculate the difference between two dates without having to worry about finicky syntax. Specifically, this means you can run the *same code* across multiple databases and let dbt generate the correct syntax for each one.
+
+Using the [jaffle shop](https://github.com/dbt-labs/jaffle_shop/blob/main/models/orders.sql), a simple dataset and dbt project, we can calculate the difference between two dates using the dbt DATEDIFF macro:
+
+```sql
+select
+    *,
+    {{ datediff("order_date", "'2022-06-09'", "day") }}
+from {{ ref('orders') }}
+```
+
+This would return all fields from the `orders` table and the difference in days between order dates and June 9, 2022.
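+
+If you peek at the compiled SQL in your `target/` directory, you’d see something like the following on Snowflake (a sketch; the exact rendering depends on your adapter, and `ref('orders')` is assumed to resolve to `analytics.jaffle_shop.orders`):
+
+```sql
+select
+    *,
+    datediff(day, order_date, '2022-06-09')
+from analytics.jaffle_shop.orders
+```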
+
+Under the hood, this macro is taking your inputs and creating the appropriate SQL syntax for the DATEDIFF function *specific to your data warehouse.*
+
+*This post is a part of the SQL love letters—a series on the SQL functions the dbt Labs data team members use and love. You can find [the entire collection here](https://getdbt.com/sql-foundations/top-sql-functions).*
diff --git a/website/blog/2022-07-12-change-data-capture.md b/website/blog/2022-07-12-change-data-capture.md
new file mode 100644
index 00000000000..b765cec9c95
--- /dev/null
+++ b/website/blog/2022-07-12-change-data-capture.md
@@ -0,0 +1,380 @@
+---
+title: "Strategies for change data capture in dbt"
+description: "Capturing a historical view of your data is complex. Grace Goheen walks you through how to do it in this blog!"
+slug: change-data-capture
+
+authors: [grace_goheen]
+
+tags: [analytics craft]
+hide_table_of_contents: false
+
+date: 2022-07-14
+is_featured: true
+---
+
+
+There are many reasons you, as an analytics engineer, may want to capture the complete version history of data:
+
+- You’re in an industry with a very high standard for data governance
+- You need to track big OKRs over time to report back to your stakeholders
+- You want to build a window to view history with both forward and backward compatibility
+
+These are often high-stakes situations! So accuracy in tracking changes in your data is key.
+
+
+
+If you’ve encountered this problem before, you know it’s a tricky one. dbt is [idempotent](https://discourse.getdbt.com/t/understanding-idempotent-data-transformations/518) - it recreates tables at runtime with the `CREATE TABLE AS` syntax. Because of this, the ability to access a full picture of historical outputs isn't intrinsic to dbt.
+
+Let’s imagine a specific scenario. Joanne is an analytics engineer for a large e-commerce company. The head of sales just messaged her the following question:
+
+“Can you tell me the income for January 2022 for all clothing products?”
+
+On the surface, this may seem like a simple question. But what if the calculation of income has changed since January 2022? Should Joanne calculate the income using the current formula or the formula that was used in January 2022? What if the source data for January changed after the month closed? Should Joanne use the source data as it was on January 30th, 2022 or the source data as it is now?
+
+All of these questions bubble up to our main theme: *How can you capture historical versions of your data using dbt?*
+
+Sorry, Joanne. The TL;DR is - “it depends.”
+
+When I first encountered this problem, it took time and effort to:
+
+1. think through the possible solutions
+
+and
+
+2. determine which solution best suited my needs
+
+The goal of this article is to eliminate step one – to provide you with a menu of solutions I’ve encountered so you can spend less time ideating and more time considering the nuances of your specific use-case.
+
+I’ll start by discussing a basic version of the scenario I first encountered – a ⚠️ misapplication ⚠️ of dbt’s snapshot functionality.
Then, I’ll outline a couple of solutions:
+
+- **Downstream Incremental Model**: Build an incremental model downstream of the model which contains your business logic to “grab” every point-in-time version
+- **Upstream Snapshots**: Build snapshots on all of your sources to capture changes in your raw data and calculate all versions of history every time you execute a `dbt run`
+
+Finally, I’ll discuss the pros and cons of each solution to give you a head start on step two.
+
+## Scenario
+
+Let’s return to Joanne. Using dbt and her favorite BI tool, Joanne has created an income report to track monthly income for each product category.
+
+You can imagine her DAG as shown below, where `fct_income` captures income per month for each product category.
+
+![](/img/blog/2022-07-12-change-data-capture-metrics/fct-income-dag.png)
+
+Joanne executes a `dbt run` on January 30th, 2022 and queries the resulting table:
+
+```sql
+select * from fct_income where month_year = 'January 2022'
+```
+
+She gets the following output:
+
+| month_year | product_category | income | run_timestamp |
+|:---:|:---:|:---:|:---:|
+| January 2022 | clothing | 100 | 01/30/22 12:00:00 |
+| January 2022 | electronics | 200 | 01/30/22 12:00:00 |
+| January 2022 | books | 300 | 01/30/22 12:00:00 |
+
+But a few days later, her source data changes for January: a manufacturing cost was dated incorrectly and has now been updated in the source. Joanne executes a `dbt run` again on February 3rd. Now when she queries `fct_income`, she gets the following output:
+
+| month_year | product_category | income | run_timestamp |
+|:---:|:---:|:---:|:---:|
+| January 2022 | clothing | **50** | 02/03/22 16:00:00 |
+| January 2022 | electronics | **150** | 02/03/22 16:00:00 |
+| January 2022 | books | **200** | 02/03/22 16:00:00 |
+
+A few days later, Joanne finds a bug in her dbt code. She fixes the bug and executes a `dbt run` again on February 10th. Now, when she queries `fct_income`, she gets the following output:
+
+| month_year | product_category | income | run_timestamp |
+|:---:|:---:|:---:|:---:|
+| January 2022 | clothing | **52** | 02/10/22 08:00:00 |
+| January 2022 | electronics | **152** | 02/10/22 08:00:00 |
+| January 2022 | books | **202** | 02/10/22 08:00:00 |
+
+When the head of sales messages Joanne the following question: “Can you tell me the income for January 2022 for all clothing products?”, she’s unsure which number to give: 100, 50, or 52.
+
+![](/img/blog/2022-07-12-change-data-capture-metrics/income-meme.png)
+
+Because of this complexity, she decides to capture the history of her income report so that she can easily swap between versions in her BI tool.
+
+Her goal is to capture **all** versions of the `fct_income` model for January. Something like this:
+
+| month_year | product_category | income | run_timestamp |
+|:---:|:---:|:---:|:---:|
+| January 2022 | clothing | 100 | 01/30/22 12:00:00 |
+| January 2022 | electronics | 200 | 01/30/22 12:00:00 |
+| January 2022 | books | 300 | 01/30/22 12:00:00 |
+| January 2022 | clothing | 50 | 02/03/22 16:00:00 |
+| January 2022 | electronics | 150 | 02/03/22 16:00:00 |
+| January 2022 | books | 200 | 02/03/22 16:00:00 |
+| January 2022 | clothing | 52 | 02/10/22 08:00:00 |
+| January 2022 | electronics | 152 | 02/10/22 08:00:00 |
+| January 2022 | books | 202 | 02/10/22 08:00:00 |
+
+In order to achieve this **long table of history**, she decides to start [snapshotting](https://docs.getdbt.com/docs/building-a-dbt-project/snapshots) her final model, `fct_income`.
+ +:::caution Don't be like Joanne +I'm including the code samples for completeness, but remember: the method described in this scenario of snapshotting a final model contradicts dbt Labs' best practices. Either of the solutions detailed later is a better approach. +::: + +```sql +{% snapshot snapshot_fct_income %} + +{{ + config( + target_database='analytics', + target_schema='snapshots', + unique_key='id', + strategy='check', + check_cols=['income'] + ) +}} + +select + month_year || ' - ' || product_category as id, + * +from {{ ref('fct_income') }} + +{% endsnapshot %} +``` + +The output of `snapshot_fct_income` looks like this: + +| id | month_year | product_category | income | run_timestamp | dbt_valid_from | dbt_valid_to | +|:---:|:---:|:---:|:---:|:---:|:---:|:---:| +| January 2022 - clothing | January 2022 | clothing | 100 | 01/30/22 12:00:00 | 01/30/22 12:00:00 | 02/03/22 16:00:00 | +| January 2022 - electronics | January 2022 | electronics | 200 | 01/30/22 12:00:00 | 01/30/22 12:00:00 | 02/03/22 16:00:00 | +| January 2022 - books | January 2022 | books | 300 | 01/30/22 12:00:00 | 01/30/22 12:00:00 | 02/03/22 16:00:00 | +| January 2022 - clothing | January 2022 | clothing | 50 | 02/03/22 16:00:00 | 02/03/22 16:00:00 | 02/10/22 08:00:00 | +| January 2022 - electronics | January 2022 | electronics | 150 | 02/03/22 16:00:00 | 02/03/22 16:00:00 | 02/10/22 08:00:00 | +| January 2022 - books | January 2022 | books | 200 | 02/03/22 16:00:00 | 02/03/22 16:00:00 | 02/10/22 08:00:00 | +| January 2022 - clothing | January 2022 | clothing | 52 | 02/10/22 08:00:00 | 02/10/22 08:00:00 | NULL | +| January 2022 - electronics | January 2022 | electronics | 152 | 02/10/22 08:00:00 | 02/10/22 08:00:00 | NULL | +| January 2022 - books | January 2022 | books | 202 | 02/10/22 08:00:00 | 02/10/22 08:00:00 | NULL | + +Each month now has multiple versions of income, and the sales department is responsible for determining which version is “correct.” + +In order to keep track of which version has been marked as “correct” by the sales department, Joanne creates a seed file to capture which version of the `fct_income` model is the correct one for each month. The output of her seed `income_report_versions` looks like this: + +| month_year | correct_version | comment | +|:---:|:---:|:---:| +| January 2022 | 02/10/22 08:00:00 | Approved by Lucy | + +Her final DAG now looks like this: + +![](/img/blog/2022-07-12-change-data-capture-metrics/income-report-versions-dag.png) + +She's snapshotting `fct_income`, joining the seed file with the snapshot, then exposing the final output to her BI tool. The final output of `stg_snapshot_fct_income` looks like this: + +| month_year | product_category | income | run_timestamp | correct_version | +|:---:|:---:|:---:|:---:|:---:| +| January 2022 | clothing | 100 | 01/30/22 12:00:00 | FALSE | +| January 2022 | electronics | 200 | 01/30/22 12:00:00 | FALSE | +| January 2022 | books | 300 | 01/30/22 12:00:00 | FALSE | +| January 2022 | clothing | 50 | 02/03/22 16:00:00 | FALSE | +| January 2022 | electronics | 150 | 02/03/22 16:00:00 | FALSE | +| January 2022 | books | 200 | 02/03/22 16:00:00 | FALSE | +| January 2022 | clothing | 52 | 02/10/22 08:00:00 | TRUE | +| January 2022 | electronics | 152 | 02/10/22 08:00:00 | TRUE | +| January 2022 | books | 202 | 02/10/22 08:00:00 | TRUE | + +This method *technically* works. Joanne can track what she needs: + +- source data changes +- business logic changes + +And she can easily switch versions by adding a filter on her BI layer. 
+
+However, this method causes long job times and adds potentially unnecessary complexity – one of the reasons our [best practices](https://docs.getdbt.com/docs/building-a-dbt-project/snapshots#snapshot-query-best-practices) recommend only using snapshots to track changes in your source data, rather than your final models.
+
+Below, you’ll find two solutions that are more effective than snapshotting a final model, as well as the pros and cons of each method.
+
+## Solution #1: Downstream Incremental Model
+
+Instead of using snapshots, Joanne could create an [incremental model](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/configuring-incremental-models) downstream of `fct_income` to “grab” every point-in-time version of `fct_income` – let’s call this incremental model `int_income_history` and assume it has the following config block:
+
+```sql
+{{
+  config(
+    materialized='incremental'
+  )
+}}
+```
+
+Because `int_income_history` is materialized as incremental but does *not* include a `unique_key` config, dbt will only execute `INSERT` statements – new rows will be added, but old rows will remain unchanged.
+
+The rest of `int_income_history` would look like this:
+
+```sql
+...
+
+select
+  *
+from {{ ref('fct_income') }}
+{% if is_incremental() %}
+  where true
+{% endif %}
+```
+
+There are a few additional configs that Joanne might find helpful:
+
+- she can use the `on_schema_change` config to handle schema changes if new columns are added and/or deleted from `fct_income`
+- she can also set the `full_refresh` config to false in order to prevent accidental loss of the historical data
+- she can build this table in a custom `schema` if she wants to enforce specific role-based permissions for this historical table
+- she can specify a time-grain `unique_key` if she wants to reduce the number of versions being captured
+  - for example, if she only wants to capture the final version of each day she could set `unique_key = date_trunc('day', run_timestamp)`. This is excluded from the example below, as we assume that Joanne does indeed want to capture every version of `fct_income`
+
+The final config block for `int_income_history` might look something like this:
+
+```sql
+{{
+  config(
+    materialized='incremental',
+    full_refresh=false,
+    schema='history',
+    on_schema_change='sync_all_columns'
+  )
+}}
+```
+
+As a final step, Joanne would create `fct_income_history` to join in the seed file to determine which versions are “correct”.
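+
+That final model might look something like the following sketch, which assumes the seed’s `correct_version` column stores the `run_timestamp` of the approved version:
+
+```sql
+-- models/fct_income_history.sql
+select
+    history.month_year,
+    history.product_category,
+    history.income,
+    history.run_timestamp,
+    -- a version is "correct" when its run_timestamp matches the approved one
+    coalesce(history.run_timestamp = versions.correct_version, false) as correct_version
+from {{ ref('int_income_history') }} as history
+left join {{ ref('income_report_versions') }} as versions
+    on history.month_year = versions.month_year
+```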
Her new DAG looks like this, where `int_income_history` is an incremental model without a unique key:
+
+![](/img/blog/2022-07-12-change-data-capture-metrics/int-income-history-dag.png)
+
+The final output of `fct_income_history` would look identical to `stg_snapshot_fct_income` from her initial approach:
+
+| month_year | product_category | income | run_timestamp | correct_version |
+|:---:|:---:|:---:|:---:|:---:|
+| January 2022 | clothing | 100 | 01/30/22 12:00:00 | FALSE |
+| January 2022 | electronics | 200 | 01/30/22 12:00:00 | FALSE |
+| January 2022 | books | 300 | 01/30/22 12:00:00 | FALSE |
+| January 2022 | clothing | 50 | 02/03/22 16:00:00 | FALSE |
+| January 2022 | electronics | 150 | 02/03/22 16:00:00 | FALSE |
+| January 2022 | books | 200 | 02/03/22 16:00:00 | FALSE |
+| January 2022 | clothing | 52 | 02/10/22 08:00:00 | TRUE |
+| January 2022 | electronics | 152 | 02/10/22 08:00:00 | TRUE |
+| January 2022 | books | 202 | 02/10/22 08:00:00 | TRUE |
+
+## Solution #2: Upstream Snapshots
+
+Alternatively, Joanne could snapshot her source data and add flexibility to her modeling so that all historical versions are calculated *at the same time*. Let’s look at our example.
+
+Joanne could track changes in the source data by adding snapshots directly on top of her raw data.
+
+![](/img/blog/2022-07-12-change-data-capture-metrics/snapshots-dag.png)
+
+This would change the grain of these `stg_` tables, so she would see a row for each version of each field. The staging models will contain the history of each record.
+
+Remember the source data change Joanne noticed — a manufacturing cost was dated incorrectly (Junkuary 2022 instead of January 2022). With this solution, the `costs_snapshot` model will pick up this change:
+
+```sql
+{% snapshot costs_snapshot %}
+
+{{
+  config(
+    target_database='analytics',
+    target_schema='snapshots',
+    unique_key='cost_id',
+    strategy='timestamp',
+    updated_at='updated_at'
+  )
+}}
+
+select * from {{ source('source', 'costs') }}
+
+{% endsnapshot %}
+```
+
+| cost_id | month_year | cost | updated_at | dbt_valid_from | dbt_valid_to |
+|:---:|:---:|:---:|:---:|:---:|:---:|
+| 1 | Junkuary 2022 | 50 | 01/15/22 12:00:00 | 01/15/22 12:00:00 | 02/03/22 12:00:00 |
+| 1 | January 2022 | 50 | 02/03/22 12:00:00 | 02/03/22 12:00:00 | NULL |
+
+:::note Note
+Because snapshots only capture changes detected at the time the `dbt snapshot` command is executed, it is technically possible to miss some changes to your source data. You will have to consider how often you want to run this snapshot command in order to capture the history you need.
+:::
+
+The original `fct_income` model now calculates the income for each version of source data, every time Joanne executes a `dbt run`. In other words, the downstream `fct_` models are **version-aware**. Because of this, Joanne changes the name of `fct_income` to `fct_income_history` to be more descriptive.
+
+In order to track changes in business logic, she can apply each version of logic to the relevant records and union them together.
+
+Remember the bug Joanne found in her dbt code.
With this solution, she can track this change in business logic in the `stg_costs` model:
+
+```sql
+-- apply the old logic for any records that were valid on or before the logic change
+select
+  cost_id,
+  ...,
+  cost + tax as final_cost, -- old logic
+  1 || '-' || dbt_valid_from as version
+from costs_snapshot
+where dbt_valid_from <= to_timestamp('02/10/22 08:00:00')
+
+union all
+
+-- apply the new logic for any records that were still valid when the logic changed
+select
+  cost_id,
+  ...,
+  cost as final_cost, -- new logic
+  2 || '-' || dbt_valid_from as version
+from costs_snapshot
+where to_timestamp('02/10/22 08:00:00') between dbt_valid_from and coalesce(dbt_valid_to, to_timestamp('01/01/2099 00:00:00'))
+```
+
+| cost_id | month_year | cost | tax | final_cost | version |
+|:---:|:---:|:---:|:---:|:---:|:---:|
+| 1 | Junkuary 2022 | 50 | 1 | 51 | 1 - 01/15/22 12:00:00 |
+| 1 | January 2022 | 50 | 1 | 51 | 1 - 02/03/22 12:00:00 |
+| 1 | January 2022 | 50 | 1 | 50 | 2 - 02/03/22 12:00:00 |
+
+The contents of the seed `income_report_versions` would look slightly different to match the change in version definition:
+
+| month_year | correct_version | comment |
+|:---:|:---:|:---:|
+| January 2022 | 2 - 02/03/22 12:00:00 | Approved by Lucy |
+
+After joining in the seed file (check out [Tackling the complexity of joining snapshots](https://docs.getdbt.com/blog/joining-snapshot-complexity)), her new DAG looks like this:
+
+![](/img/blog/2022-07-12-change-data-capture-metrics/final-dag.png)
+
+The final output of `fct_income_history` would accomplish the same goal as `stg_snapshot_fct_income` from her initial approach:
+
+| month_year | product_category | income | version | correct_version |
+|:---:|:---:|:---:|:---:|:---:|
+| January 2022 | clothing | 100 | 1 - 01/15/22 12:00:00 | FALSE |
+| January 2022 | electronics | 200 | 1 - 01/15/22 12:00:00 | FALSE |
+| January 2022 | books | 300 | 1 - 01/15/22 12:00:00 | FALSE |
+| January 2022 | clothing | 50 | 1 - 02/03/22 12:00:00 | FALSE |
+| January 2022 | electronics | 150 | 1 - 02/03/22 12:00:00 | FALSE |
+| January 2022 | books | 200 | 1 - 02/03/22 12:00:00 | FALSE |
+| January 2022 | clothing | 52 | 2 - 02/03/22 12:00:00 | TRUE |
+| January 2022 | electronics | 152 | 2 - 02/03/22 12:00:00 | TRUE |
+| January 2022 | books | 202 | 2 - 02/03/22 12:00:00 | TRUE |
+
+## Final thoughts
+
+Both of these solutions allow Joanne to achieve her desired output – a table containing all versions of income for a given month – while improving the workflow and the efficiency of the final model.
+
+However, each has its advantages and disadvantages.
+
+**Solution #1: Downstream Incremental Model**
+
+| Pros | Cons |
+|:---:|:---:|
+| incremental models without unique keys are fast | this isn't really the intended use of the incremental materialization |
+| | Joanne has no way to re-calculate prior versions if her historical table is accidentally lost |
+
+**Solution #2: Upstream Snapshots**
+
+| Pros | Cons |
+|:---:|:---:|
+| Joanne doesn't have to worry about losing historical data | snapshots are highly complex and require more institutional knowledge for Joanne's team |
+| | every time Joanne wants to make a code change that affects her calculations, she'll have to remember to apply the change to each set of relevant records and union the outputs together |
+
+When deciding between the two solutions, you should consider the following:
+
+- How often is your source data changing?
+- How many bug fixes do you anticipate?
+- How fast do you need this job to be?
+
+- How much visibility do you need into why a change in historic values occurred?
+
+💡 What do you think? Is there another, more optimal, solution?
diff --git a/website/blog/2022-07-13-star-sql-love-letter.md b/website/blog/2022-07-13-star-sql-love-letter.md
new file mode 100644
index 00000000000..87469dc2730
--- /dev/null
+++ b/website/blog/2022-07-13-star-sql-love-letter.md
@@ -0,0 +1,83 @@
+---
+title: "A star (generator) is born"
+description: "One of the macros dbt utils offers is the `star` generator. This dbt macro is one of our favorites because it lets you select all the fields you want without writing the columns you don't."
+slug: star-sql-love-letter
+
+authors: [kira_furuichi]
+
+tags: [sql magic]
+hide_table_of_contents: false
+
+date: 2022-05-23
+is_featured: true
+---
+
+
+We’ve likely all been here: Table A has 56 columns and we want to select all but one of them (`column_56`). So here we go, let’s get started…
+
+```sql
+select
+    column_1,
+    column_2,
+    column_3,
+    please_save_me…
+from {{ ref('table_a') }}
+```
+
+At this point, you realize your will to continue typing out the next 52 columns has essentially dwindled down to nothing and you’re probably questioning the life choices that led you here.
+
+But what if there was a way to make these 56+ lines of code come down to a handful? Well, that’s where a handy [dbt macro](https://docs.getdbt.com/docs/building-a-dbt-project/jinja-macros) comes into play.
+
+
+
+## The `star` dbt macro
+
+dbt supports [dbt_utils](https://github.com/dbt-labs/dbt-utils), a [package of macros and tests](https://docs.getdbt.com/docs/building-a-dbt-project/package-management) that data folks can use to help them write more DRY code in their dbt project. One of the macros dbt utils offers is the `star` generator.
+
+This macro:
+
+* Generates a comma-separated list of all fields that exist in the `from` [relation](https://docs.getdbt.com/reference/dbt-classes#relation) and excludes any fields listed in an `except` argument,
+* Can optionally prefix all generated fields with a relation alias using the `relation_alias` argument,
+* And can concatenate prefixes and/or suffixes to all generated fields using the `prefix` and `suffix` arguments
+
+So what does this mean for the example from above? Instead of writing out all 55 columns, you can use the `star` macro to select all fields except the column you don’t want:
+
+```sql
+select
+    {{ dbt_utils.star(from=ref('table_a'), except=['column_56']) }}
+from {{ ref('table_a') }}
+```
+
+This dbt model compiles to:
+
+```sql
+select
+    column_1,
+    column_2,
+    …, --imagine we weren’t lazy and wrote out all other columns
+    column_55
+from table_a
+```
+
+With the `star` macro, all of the columns except `column_56` are generated in a comma-separated list within the `select` statement. What was once 56+ lines of tedious, mind-numbing SQL becomes 3 lines using the `star` macro. You can also exclude multiple columns by passing in the column names to the `except` argument.
+
+If you want to prefix all fields in a model with the same relation alias without having to explicitly rename them all, you can also use the `star` macro with the `relation_alias` argument passed in:
+
+```sql
+select
+    {{ dbt_utils.star(from=ref('table_a'), relation_alias='my_new_alias') }}
+from {{ ref('table_a') }}
+```
+
+Now, this will return all fields from `table_a` with the `my_new_alias.field_name` naming format.
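+
+The `prefix` and `suffix` arguments work in a similar spirit: they concatenate a string onto each generated field name. For example, to suffix every column with `_old` (handy when comparing two versions of a table side by side), you might write something like this sketch:
+
+```sql
+select
+    {{ dbt_utils.star(from=ref('table_a'), suffix='_old') }}
+from {{ ref('table_a') }}
+```
+
+This compiles to roughly `column_1 as column_1_old`, `column_2 as column_2_old`, and so on.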
+
+[Under the hood](https://github.com/dbt-labs/dbt-utils/blob/main/macros/sql/star.sql), the `star` macro is actually using another dbt utils macro ([get_filtered_columns_in_relation](https://github.com/dbt-labs/dbt-utils#get_filtered_columns_in_relation-source)) to loop through the fields and select, alias, and/or append string values to them.
+
+## Why we love the `star` macro
+
+It’s no secret: the Data Team at dbt Labs loves to use dbt utils’ macros and tests when appropriate. We like dbt utils so much we created a March Madness Utils Bracket for them (not taking questions at this time) and we used the `star` macro alone over 30 times in our internal dbt repository.
+
+![](/img/blog/2022-07-13-star-sql-love-letter/utils-madness-1.png)
+
+
+Overall, the `star` macro is a great way to dip your toes into the dbt utils package, write DRY code, and reduce your carpal tunnel.
\ No newline at end of file
diff --git a/website/blog/2022-07-19-migrating-from-stored-procs.md b/website/blog/2022-07-19-migrating-from-stored-procs.md
new file mode 100644
index 00000000000..2140bc24dc1
--- /dev/null
+++ b/website/blog/2022-07-19-migrating-from-stored-procs.md
@@ -0,0 +1,225 @@
+---
+title: "Migrating from Stored Procedures to dbt"
+description: "Stored procedures are great, but they eventually become hard to scale. dbt fixes that, but the migration process can seem daunting. Matt Winkler demystifies the process in this blog!"
+slug: migrating-from-stored-procs
+
+authors: [matt_winkler]
+
+tags: [analytics craft]
+hide_table_of_contents: false
+
+date: 2022-07-20
+is_featured: true
+---
+
+Stored procedures are widely used throughout the data warehousing world. They’re great for encapsulating complex transformations into units that can be scheduled and respond to conditional logic via parameters. However, as teams continue building their transformation logic using the stored procedure approach, we see more data downtime, increased data warehouse costs, and incorrect or unavailable data in production. All of this leads to more stressed and unhappy developers, and consumers who have a hard time trusting their data.
+
+If your team works heavily with stored procedures, and you ever find yourself with the following or related issues:
+
+- dashboards that aren’t refreshed on time
+- it feels too slow and risky to modify pipeline code based on requests from your data consumers
+- it’s hard to trace the origins of data in your production reporting
+
+then it’s worth considering whether an alternative approach with dbt might help.
+
+
+
+## Why use modular dbt models instead of stored procedures?
+
+We work with many analytics teams to refactor their stored procedure code into dbt. Many of them come in thinking that the upfront effort to modernize their approach to data transformation will be too much to justify. However, we see that in the long term this isn’t the case.
+
+For example, a dbt Cloud user achieved the following results when moving away from the stored procedure approach:
+
+### Improved Uptime
+
+Before migrating to dbt, the team was spending 6-8 hours per day on pipeline refreshes, making their investment in their data warehouse essentially worthless during that downtime. After migration, their uptime increased from 65% to 99.9%. This also has a drastic impact on data consumers’ confidence in the underlying pipelines.
### Tackling New Use Cases
+Further, the team was able to support new mission-critical use cases, which simply wouldn’t have been possible had the team continued using the same techniques they had historically.
+
+Now that we’ve discussed why moving from stored procs to dbt can make sense for many analytics teams, let’s discuss how the process works in a bit more detail.
+
+## What are the problems with stored procedures?
+Some of the drawbacks to using stored procedures may not have been apparent historically, but they come to light when we consider modern expectations of data pipelines such as transparent documentation, testability, and reusability of code. For one, stored procedures don’t lend themselves well to documenting data flow, as the intermediate steps are a black box. Second, this also means that your stored procedures aren’t very testable. Finally, we often see logic from intermediate steps in one stored procedure copied almost line-for-line to others! This creates extra bloat across a development team’s codebase, which drags down team efficiency.
+
+We might visualize this situation as something like this:
+
+![Diagram of what a stored procedure data flow would look like. Hint: it's complicated](/img/blog/2022-07-19-migrating-from-stored-procs/stored-procs-diagram.png)
+
+## Why consider dbt as an alternative?
+
+dbt offers an approach that is self-documenting and testable, and that encourages code reuse during development. One of the most important elements of working in dbt is embracing modularity when approaching data pipelines. In dbt, each business object managed by a data pipeline is defined in a separate model (think: orders data). These models are flexibly grouped into layers to reflect the progression from raw to consumption-ready data. Working in this way, we create reusable components, which helps avoid duplicated logic and confusion among development teams.
+
+With dbt, we work towards creating simpler, more transparent data pipelines like this:
+
+![Diagram of what data flows look like with dbt. It's easier to trace lineage in this setup.](/img/blog/2022-07-19-migrating-from-stored-procs/dbt-diagram.png)
+
+Tight [version control integration](https://docs.getdbt.com/docs/guides/best-practices#version-control-your-dbt-project) is an added benefit of working with dbt. By leveraging the power of git-based tools, dbt enables you to integrate and test changes to transformation pipelines much faster than you can with other approaches. We often see teams who work in stored procedures making changes to their code without any notion of tracking those changes over time. While that’s more of an issue with the team’s chosen workflow than a problem with stored procedures per se, it does reflect how legacy tooling makes analytics work harder than necessary.
+
+## Methodologies for migrating from stored procedures to dbt
+
+Whether you’re working with T-SQL, PL/SQL, BTEQ, or some other SQL dialect, the process of migrating from the stored procedure approach to the dbt approach can typically be broken down into similar steps. Over the years, we’ve worked with many customers to convert confusing and hard-to-manage stored procedure code into modular dbt pipelines. Through our work, we’ve arrived at a few key best practices in undertaking this process, which we present below.
+
+If you’re interested in diving into further detail on this topic, please visit our [companion guide](https://docs.getdbt.com/guides/migration/tools/migrating-from-stored-procedures/1-migrating-from-stored-procedures) for more in-depth information about the refactoring process.
+
+### Step 0: Understand a bit about how dbt works
+
+If this is your first time running dbt, you may want to start with the [Introduction to dbt](https://docs.getdbt.com/docs/introduction) and the [Getting Started tutorial](https://docs.getdbt.com/tutorial/setting-up) before diving into refactoring. If you’re already familiar with building dbt models and pipelines, feel free to dive in!
+
+### Step 1: Understand how dbt and stored procedures are different
+
+Most folks who have written stored procedures in the past think about the world in terms of a stateful process that progresses line-by-line. You start out creating your tables, and then use DML statements to insert, update, and delete data, continually applying operations to the same base table throughout the course of a transformation.
+
+On the other hand, dbt takes a declarative approach to managing datasets by using SELECT statements to describe the set of data that should make up the table. The tables (or views) defined in this way represent each stage or unit of transformation work, and are assembled into a [Directed Acyclic Graph (DAG)](https://docs.getdbt.com/docs/introduction#what-makes-dbt-so-powerful) to determine the order in which each statement runs. As we’ll see, this achieves the same ends as procedural transformations, but instead of applying many operations to one dataset, we take a more modular approach. This makes it MUCH easier to reason about, document, and test transformation pipelines.
+
+### Step 2: Plan how to convert your stored procedure to dbt code
+
+In general, we've found that the recipe presented below is an effective conversion process.
+
+1. Map data flows in the stored procedure
+2. Identify raw source data
+3. Create a staging layer on top of raw sources for initial data transformations such as data type casting, renaming, etc.
+4. Replace hard-coded table references with dbt [source()](https://docs.getdbt.com/docs/building-a-dbt-project/using-sources) and [ref()](https://docs.getdbt.com/reference/dbt-jinja-functions/ref) statements. This enables 1) ensuring things are run in the right order and 2) automatic documentation!
+5. Map INSERTS and UPDATES in the stored procedure to SELECT statements in dbt models
+6. Map DELETES in the stored procedure to WHERE filters in dbt models
+7. If necessary, use [variables](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/using-variables) in dbt to dynamically assign values at runtime, similar to arguments passed to a stored procedure.
+8. Iterate on your process to refine the dbt [DAG](https://docs.getdbt.com/docs/introduction#what-makes-dbt-so-powerful) further. You could continue optimizing forever, but typically we find a good stopping point when the outputs from the stored procedure and final dbt models are at parity.
+
+Sometimes, we find ourselves confronted with code that’s so complex, the end user isn’t able to understand exactly what it’s doing. In these cases, it may not be possible to perform an apples-to-apples mapping of the process embedded in the original stored procedure, and it’s actually more efficient to scrap the whole thing and focus on working backwards to reproduce the desired output in dbt.
Note the section on auditing results below as a key success driver in this situation.
+
+### Step 3: Execute
+
+Where the magic happens :). Jon “Natty” Natkins is developing a very robust how-to guide to walk through an example refactoring process from the ground up. To give a taste, we’ll show what the first few steps of the recipe described above look like in action, mapping from the original stored procedure approach to our new one using dbt.
+
+#### Stored procedure approach (using SQL Server code):
+
+1. Define a temp table and select data from a raw table into it
+
+```sql
+IF OBJECT_ID('tempdb..#temp_orders') IS NOT NULL DROP TABLE #temp_orders
+  SELECT messageid
+        ,orderid
+        ,sk_id
+        ,client
+  INTO #temp_orders
+  FROM some_raw_table
+  WHERE . . .
+```
+
+2. Run another INSERT from a second raw table
+
+```sql
+  INSERT INTO #temp_orders(messageid, orderid, sk_id, client)
+  SELECT messageid
+        ,orderid
+        ,sk_id
+        ,client
+  FROM another_raw_table
+  WHERE . . .
+```
+
+3. Run a DELETE on the temp table to get rid of test data that lives in production
+
+```sql
+  DELETE tmp
+  FROM #temp_orders AS tmp
+  INNER JOIN
+    criteria_table cwo WITH (NOLOCK)
+    ON tmp.orderid = cwo.orderid
+  WHERE ISNULL(tmp.is_test_record,'false') = 'true'
+```
+
+We often see this process go on for quite some time (think: 1,000s of lines of code). To recap, the issues with this approach are:
+
+- Tracing the data flow becomes REALLY hard because the code is a) really long and b) not documented automatically.
+- The process is stateful – our example #temp_orders table evolves throughout the process, which means we have to juggle several different factors if we want to adjust it.
+- It’s not easy to test.
+
+#### dbt approach
+
+
+1. Identify the raw source tables, and then map each of the INSERT statements above into separate dbt models, and include an automatically generated WHERE statement to eliminate the test records from the third step above.
+
+```sql
+-- models/stg_orders_a.sql
+{{
+  config(
+    materialized='view'
+  )
+}}
+
+with raw_data as (
+    select *
+    from {{ source('raw', 'some_raw_table') }}
+    where is_test_record = false
+),
+
+cleaned as (
+    select messageid,
+        orderid::int as orderid,
+        sk_id,
+        case when client_name in ('a', 'b', 'c') then clientid else -1 end as clientid
+    from raw_data
+)
+
+select * from cleaned
+```
+
+2. Write tests on the models to ensure our code is working at the proper grain
+
+```yaml
+version: 2
+
+models:
+  - name: stg_orders_a
+    columns:
+      - name: orderid
+        tests:
+          - unique
+          - not_null
+```
+
+3. UNION the models together
+
+```sql
+{{
+  config(
+    materialized='table'
+  )
+}}
+
+with a as ( select * from {{ ref('stg_orders_a') }} ),
+b as ( select * from {{ ref('stg_orders_b') }} ),
+
+unioned as (
+    select * from a
+    union all
+    select * from b
+)
+
+select * from unioned
+```
+
+We’ve just created a modular, documented, and testable alternative that manages the same transformations.
+
+![A dbt DAG that shows the output of the code you just implemented](/img/blog/2022-07-19-migrating-from-stored-procs/dbt-approach-model.png)
+
+### Step 4: Audit your results
+
+Any time you introduce a change to a technical process, it’s essential to check your results. Fortunately, dbt Labs maintains the [audit helper package](https://hub.getdbt.com/dbt-labs/audit_helper/0.0.2/) with exactly this use case in mind.
The audit helper enables you to perform operations such as comparing row counts and running row-by-row validation, comparing a table that’s updated by a legacy stored procedure against one that’s the result of a dbt pipeline, in order to make sure the two are exactly the same (or within a reasonable % deviation). This way, you have confidence that your new dbt pipeline is accomplishing the same goals as the transformation pipeline that existed before.
+
+## Summary
+
+We’ve highlighted several of the pain points of working with stored procedures (mainly the lack of traceability and data testing) and how the dbt approach can help. Well-documented, modular, testable code makes for happy engineers and happy business users alike :handshake:. It also helps us save time and money by making pipelines more reliable and easy to update.
+
+Over time, this approach is much more extensible than continuing to stack code on top of an unwieldy process. It’s also automatically documented, and using tests ensures the pipeline is resilient to changes over time. We continue mapping the data flow from the existing stored procedure to the dbt data pipeline, iterating until we achieve the same outputs as before.
+
+We’d love to hear your feedback! You can find us on [slack](https://www.getdbt.com/community/), [github](https://github.com/dbt-labs/dbt-core), or [reach out](https://www.getdbt.com/contact/) to our sales team.
+
+## Appendix
+
+dbt Labs has developed a number of related resources you can use to learn more about working in dbt, and comparing our approach to others in the Analytics ecosystem.
+
+- [Refactoring legacy SQL to dbt](https://docs.getdbt.com/tutorial/refactoring-legacy-sql)
+- [The case for the ELT workflow](https://www.getdbt.com/analytics-engineering/case-for-elt-workflow/)
+- [Refactoring SQL for modularity](https://courses.getdbt.com/courses/refactoring-sql-for-modularity)
+- [Data modeling techniques for modularity](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/)
diff --git a/website/blog/2022-07-26-configuring-grants.md b/website/blog/2022-07-26-configuring-grants.md
new file mode 100644
index 00000000000..5888f6ca782
--- /dev/null
+++ b/website/blog/2022-07-26-configuring-grants.md
@@ -0,0 +1,209 @@
+---
+title: "Updating our permissioning guidelines: grants as configs in dbt Core v1.2"
+description: "End consumers (like users and BI tools) will need to be granted the privilege to read the tables and views dbt creates in your warehouse. In v1.2, we introduced a `grants` config that is easier to use than hooks and uses syntax that is database agnostic."
+
+slug: configuring-grants
+
+authors: [jeremy_cohen, doug_beatty]
+
+tags: [dbt tutorials]
+hide_table_of_contents: false
+
+date: 2022-07-26
+is_featured: true
+---
+
+If you’ve needed to grant access to a dbt model between 2019 and today, there’s a good chance you’ve come across the ["The exact grant statements we use in a dbt project"](https://discourse.getdbt.com/t/the-exact-grant-statements-we-use-in-a-dbt-project/430) post on Discourse. It explained options for covering two complementary abilities:
+1. querying relations via the "select" privilege
+1. using the schema those relations are within via the "usage" privilege
+
+
+
+## The solution then
+Prior to dbt Core v1.2, we proposed three possible approaches (each coming with [caveats and trade-offs](#caveats-and-trade-offs-of-the-original-guidance)):
+
+1. Using `on-run-end` hooks to `grant select on all` tables/views dbt has just built
+1. Using `post-hook` to grant `select` on a model as soon as it’s built
+1. Using either default grants (future grants on Snowflake) or a combination of `post-hooks` and `on-run-end` hooks instead
+
+These options were the state of the art... until today!
+
+## What’s changed?
+
+In v1.2, we [introduced](https://www.getdbt.com/blog/teaching-dbt-about-grants) a [`grants` config](https://docs.getdbt.com/reference/resource-configs/grants) that works a lot like `post-hook`, with two key differences:
+
+- You configure `grants` as a structured dictionary rather than writing all the SQL yourself
+- dbt will take *the most efficient path* to apply those grants
+
+### Why `grants` are better than hooks
+
+First of all, [hooks are hard](#issues-related-to-hooks)! Especially that nonsense around [nested curlies](https://docs.getdbt.com/docs/building-a-dbt-project/dont-nest-your-curlies).
+
+#### A problem then
+Let’s say you’ve been working on an incremental model. Previously, you granted access on this incremental model directly to `reporter`, so that people could query it downstream:
+
+```sql
+-- models/my_incremental_model.sql
+
+{{ config(
+    materialized = 'incremental',
+    post_hook = ["grant select on {{ this }} to reporter"]
+) }}
+
+select ...
+```
+
+Over time, this model took on more and more responsibilities and you decided to refactor the incremental model to feed a series of dedicated views instead. Thoughtfully, you also removed the `post_hook` that granted direct access to the incremental model:
+
+```sql
+-- models/my_incremental_model.sql
+
+{{ config(materialized = 'incremental') }}
+
+select ...
+```
+
+**The problem?** Until you `--full-refresh` it, your incremental model is still granted to the `reporter` role!
+
+#### The solution today
+
+dbt’s new `grants` implementation takes this into account. It knows whether grants are “carried over” when a model is re-run based on its materialization and your database. It makes up the difference between the existing grants and the ones you actually want.
+
+Try it out!
+
+```sql
+-- models/my_incremental_model.sql
+
+{{ config(
+    materialized = 'incremental',
+    grants = {'select': ['another_user']}
+) }}
+
+select ...
+```
+
+Run that, and verify that `another_user` can select from your model. Then change your model and run it again:
+
+```sql
+-- models/my_incremental_model.sql
+
+{{ config(
+    materialized = 'incremental',
+    grants = {'select': []}
+) }}
+
+select ...
+```
+
+If you check your database, you should see that *no one* can select from the incremental model. You can also see, in the debug-level logs, that dbt has run a `revoke` statement.
+
+(Note that, if `grants` is missing or set to `{}`, dbt will understand that you don’t want it managing grants for this table. So it’s best to explicitly specify the privilege, and that you want *no one* to have it!)
+
+Great! Now that you’re using the `grants` feature in dbt v1.2, you’ve just given this more thought than you should ever need to again 😎
+
+## Is there still a place for hooks?
+
+Yes, indeed! Some areas that stand out:
+- [Granting permissions on other object types](#granting-permissions-on-other-object-types) like granting usage on a schema
+- [Advanced permissions](#advanced-permissions-or-other-operations) like row-level access
+
+### Granting permissions on other object types
+
+For now, it’s still necessary to grant `usage` on schemas to users that will need to select from objects in those schemas.
+
+### Why `grants` are better than hooks
+
+First of all, [hooks are hard](#issues-related-to-hooks)! Especially that nonsense around [nested curlies](https://docs.getdbt.com/docs/building-a-dbt-project/dont-nest-your-curlies).
+
+#### A problem then
+Let’s say you’ve been working on an incremental model. Previously, you granted access on this incremental model directly to `reporter`, so people can query it downstream:
+
+```sql
+-- models/my_incremental_model.sql
+
+{{ config(
+    materialized = 'incremental',
+    post_hook = ["grant select on {{ this }} to reporter"]
+) }}
+
+select ...
+```
+
+Over time, this model took on more and more responsibilities, and you decided to refactor the incremental model to feed a series of dedicated views instead. Thoughtfully, you also removed the `post_hook` that granted direct access to the incremental model:
+
+```sql
+-- models/my_incremental_model.sql
+
+{{ config(materialized = 'incremental') }}
+
+select ...
+```
+
+**The problem?** Until you `--full-refresh` it, your incremental model is still granted to the `reporter` role!
+
+#### The solution today
+
+dbt’s new `grants` implementation takes this into account. It knows whether grants are “carried over” when a model is re-run, based on its materialization and your database. It makes up the difference between the existing grants and the ones you actually want.
+
+Try it out!
+
+```sql
+-- models/my_incremental_model.sql
+
+{{ config(
+    materialized = 'incremental',
+    grants = {'select': ['another_user']}
+) }}
+
+select ...
+```
+
+Run that, then verify that `another_user` can select from your model. Then change your model and run it again:
+
+```sql
+-- models/my_incremental_model.sql
+
+{{ config(
+    materialized = 'incremental',
+    grants = {'select': []}
+) }}
+
+select ...
+```
+
+If you check your database, you should see that *no one* can select from the incremental model. You could also see, in the debug-level logs, that dbt has run a `revoke` statement.
+
+(Note that if `grants` is missing or set to `{}`, dbt will understand that you don’t want it managing grants for this table. So it’s best to explicitly specify the privilege, and that you want *no one* to have it!)
+
+Great! Now that you’re using the `grants` feature in dbt v1.2, you’ve just given this more thought than you should ever need to again 😎
+
+## Is there still a place for hooks?
+
+Yes, indeed! Some areas that stand out:
+- [Granting permissions on other object types](#granting-permissions-on-other-object-types), like granting usage on a schema
+- [Advanced permissions](#advanced-permissions-or-other-operations), like row-level access
+
+### Granting permissions on other object types
+
+For now, it’s still necessary to grant `usage` on schemas to users that will need to select from objects in those schemas. Even though dbt creates schemas at the start of runs, there isn’t really a way to configure *schemas as their own objects* within dbt.
+
+Here are a couple of ways you could approach it:
+- Option A -- simple and familiar -- hooks to the rescue
+- Option B -- too clever by half -- use the dbt graph to infer which schemas need "usage"
+
+#### Option A: simple and familiar
+
+```yaml
+on-run-end:
+  # better as a macro
+  - "{% for schema in schemas %}grant usage on schema {{ schema }} to reporter;{% endfor %}"
+```
+
+Upside: Short, sweet, to the point.
+
+Downside: we need to repeat the same list of roles here that we specified in our `grants` config.
+
+#### Option B: Too clever by half
+
+Now that `grants` is a real config in dbt, available via dbt metadata, you can do all sorts of fun things with it. For instance, figure out which schemas have at least one object granting `select` to a role, and then grant `usage` on that schema to that role!
+
+```sql
+-- macros/operations/reporting_grants.sql
+{% macro grant_usage_on_schemas_where_select() %}
+    /*
+    Note: This is pseudo code only, for demonstration purposes
+    For every role that can access at least one object in a schema,
+    grant 'usage' on that schema to the role.
+    That way, users with the role can run metadata queries showing objects
+    in that schema (a common need for BI tools)
+    */
+    {% set schema_grants = {} %}
+    {% if execute %}
+        {% for node in graph.nodes.values() %}
+            {% set grants = node.config.get('grants') %}
+            {% set select_roles = grants['select'] if grants else [] %}
+            {% if select_roles %}
+                {% set database_schema = node.database ~ "." ~ node.schema %}
+                {% if database_schema in schema_grants %}
+                    {% do schema_grants[database_schema].update(select_roles) %}
+                {% else %}
+                    {% do schema_grants.update({database_schema: set(select_roles)}) %}
+                {% endif %}
+            {% endif %}
+        {% endfor %}
+    {% endif %}
+    {% set grant_list %}
+        {% for schema in schema_grants %}
+            {% for role in schema_grants[schema] %}
+                grant usage on schema {{ schema }} to {{ role }};
+            {% endfor %}
+        {% endfor %}
+    {% endset %}
+    {{ return(grant_list) }}
+{% endmacro %}
+```
+
+This is certainly too clever -- but you get the idea, and an illustration of what's possible!
+
+You can even do this at the *start* of the run, right after dbt creates its schemas, rather than waiting until the end. (Although it’s not a huge deal to wait.)
+
+```yaml
+on-run-start:
+  - "{{ grant_usage_on_schemas_where_select() }}"
+```
+
+### Advanced permissions (or other operations)
+
+Want to restrict access to specific rows in a table for specific users? Or dynamically mask column values depending on who’s asking?
+
+The approach varies by database: in Snowflake, you’ll still want a `post-hook` to apply a [row access policy](https://docs.snowflake.com/en/user-guide/security-row-intro.html) or a column [masking policy](https://docs.snowflake.com/en/sql-reference/sql/create-masking-policy.html) to your table, whereas in Databricks you'd use [dynamic view functions](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html#dynamic-view-functions).
+
+It’s good to have hooks and operations as a method to utilize cutting-edge database capabilities. Any cases that become a wide and clearly demonstrated need can be upgraded by being built into `dbt-core`.
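+
+To make the Snowflake case concrete, here’s a minimal sketch of applying a pre-existing masking policy from a `post-hook` (the policy name `pii_mask` and the `email` column are hypothetical, and the policy itself would be created separately by a DBA):
+
+```sql
+-- models/customers.sql
+-- Assumes a masking policy named pii_mask already exists in the warehouse
+{{ config(
+    materialized = 'table',
+    post_hook = "alter table {{ this }} modify column email set masking policy pii_mask"
+) }}
+
+select ...
+```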
+
+## Appendix
+
+### Caveats and trade-offs of the original guidance
+`on-run-end` hooks:
+> “for the period of time between when a model runs, and the end of the run, no one will be able to query that model, instead they’ll get a “permission denied” error. This creates downtime in your BI tool.”
+
+`manage grants` privilege:
+> “It is worth noting that this privilege *is* a global privilege – now anyone using the `transformer` role can change grants on any object as though they are the owner of the object. Up to you if you’re comfortable with this! If not, you may want to use a combination of `post-hooks` and `on-run-end` hooks instead 🙂”
+
+The biggest problems:
+
+- Even if you wrote the [DRYest](https://en.wikipedia.org/wiki/Don't_repeat_yourself) code you could, there are still *thousands* of projects that have all written the same exact [DCL](https://en.wikipedia.org/wiki/Data_control_language) statements, wrapped in the same exact macros.
+- Default + future grants (our original recommendation, back in 2019) are *tricky*. They often require extra permissions (superuser status!), they take effect automatically, and they don’t fly for folks at many organizations with tighter security policies.
+
+### Issues related to hooks
+This is just a sample of the issues we've seen:
+- [Post hooks that call macros get parsed with execute = False #2370](https://github.com/dbt-labs/dbt-core/issues/2370)
+- [get_relation returns none in hook context #2938](https://github.com/dbt-labs/dbt-core/issues/2938)
+- [this.is_view and this.is_table not working in BigQuery inside a hook #3529](https://github.com/dbt-labs/dbt-core/issues/3529)
+- [custom table schema path of {{ this }} parsed in correctly in post-hook macro #3985](https://github.com/dbt-labs/dbt-core/issues/3985)
+- [Post-hook doesn't resolve custom schema #4023](https://github.com/dbt-labs/dbt-core/issues/4023)
+- [[CT-80] [Bug] post-hook macro generates SQL with incorrect source table #4606](https://github.com/dbt-labs/dbt-core/issues/4606)
diff --git a/website/blog/2022-07-26-pre-commit-dbt.md b/website/blog/2022-07-26-pre-commit-dbt.md
new file mode 100644
index 00000000000..3873e46e3ae
--- /dev/null
+++ b/website/blog/2022-07-26-pre-commit-dbt.md
@@ -0,0 +1,268 @@
+---
+title: "Enforcing rules at scale with pre-commit-dbt"
+description: "Making sure everyone is following best practices on large dbt projects is hard. How can you enforce rules at scale? Benoit Perigaud shares a dbt package to help you do just that."
+slug: enforcing-rules-pre-commit-dbt
+authors: [benoit_perigaud]
+tags: [analytics craft]
+hide_table_of_contents: false
+
+date: 2022-08-03
+is_featured: true
+---
+
+At dbt Labs, we have [best practices](https://docs.getdbt.com/docs/guides/best-practices) we like to follow for the development of dbt projects. One of them, for example, is that all models should have at least `unique` and `not_null` tests on their primary key. But how can we enforce rules like this?
+
+That question becomes difficult to answer in large dbt projects. Developers might not follow the same conventions. They might not be aware of past decisions, and reviewing pull requests in git can become more complex. When dbt projects have hundreds of models, it's hard to know which models do not have any tests defined and aren't enforcing your conventions.
+
+
+One potential solution is to leverage the open-source package [pre-commit-dbt](https://github.com/offbi/pre-commit-dbt), created by dbt community members, which can be used to automatically run tests before committing files to git or as part of CI steps. In this article, I'll walk you through the strategy I use to implement this package and enforce rules at scale.
+
+## What are pre-commit and pre-commit-dbt?
+
+pre-commit is a framework that can be used to automatically run tests before committing files to git, leveraging git hooks.
+
+In our case, we will use the ability of pre-commit to run automated tests, but I will also explain below how to use it with the flags `--all-files` or `--files` to leverage the same tests on a predefined list of dbt models.
+
+pre-commit-dbt, on the other hand, defines dbt-specific tests and actions (called [hooks](https://github.com/offbi/pre-commit-dbt/blob/main/HOOKS.md)) for the [pre-commit](https://pre-commit.com/) framework.
+
+There are currently over 20 tests available; here are the 2 that we will leverage:
+
+- `check-model-has-tests`: Check the model has a number of tests.
+- `check-model-has-properties-file`: Check the model has a properties file (also called schema file).
+
+## Implementing pre-commit-dbt & adding tests
+
+Let’s take the example of a project with more than 300 models. Dozens of people have committed to the project and a PR review process is in place, but sometimes, with multiple models in the same PR, tracking whether tests have been added is not easy, and we know that not all models are tested today even though they should be.
+
+To remediate this, let’s follow these 4 steps:
+
+![Flow graph that describes the steps in defining a strategy for implementing this package on a dbt project with more than 300 models.](/img/blog/2022-07-26-pre-commit-dbt/define-strategy.png)
+
+### 1. Define our rules and create a pre-commit config file
+
+pre-commit-dbt provides a range of tests that can be run on the models of our dbt project. In the case of a project which has existed for some time and which might not be entirely following best practices, I would recommend selecting a small subset of tests, fixing the project, and adding more tests later on.
+
+In our example, let’s just start by saying that we want:
+- all our models to have been added to a YAML file
+- all our models to include some tests
+
+To configure pre-commit, we have to create a file called `.pre-commit-config.yaml` at the root of our dbt project and add the following content:
+
+```yaml
+repos:
+- repo: https://github.com/offbi/pre-commit-dbt
+  rev: v1.0.0
+  hooks:
+  - id: dbt-docs-generate
+  - id: check-model-has-properties-file
+    name: Check that all models are listed in a YAML file
+    files: ^models/
+  - id: check-model-has-tests
+    name: Check that all models have tests
+    files: ^models/
+```
+
+A few notes about this file:
+- We start with a `dbt-docs-generate` hook to ensure that the command `dbt docs generate` is run before checking our models. This is required because pre-commit-dbt reads from the artifacts `catalog.json` and `manifest.json`, and generating the documentation produces both of those files
+- The `files` argument is a regular expression. `^models/` is going to apply the test to all our models whose path starts with `models`, i.e. all the models of our project. If we wanted to run the test only for the models in the mart folder, we could write `^models/mart`.
There are a few handy tools online to define and test regular expressions; one of them is https://regex101.com/
+- We could also provide values for the parameter `exclude` if we wanted to exclude specific files or folders
+- We could have added a parameter to the hook `check-model-has-tests` to mention how many tests each model is supposed to have (see [here](https://github.com/offbi/pre-commit-dbt/blob/main/HOOKS.md#check-model-has-tests))
+
+Now that we have defined our configuration file, the next step will depend on whether we are using dbt via dbt Cloud or dbt Core via the CLI.
+
+If we are using dbt Cloud, let’s jump to [step 4](#4-make-it-part-of-the-periodic-checks), where we will set up pre-commit-dbt as part of the CI process; otherwise, with dbt Core, we can go to step 2.
+
+### 2. Understand the scope of the changes required
+
+With our configuration file created, we can now set up our computer to do some checks locally.
+
+#### Activating a Python virtual environment
+
+If we are using dbt in a Python virtual environment, let’s activate this environment. If not, we should really create a Python virtual environment ([more info here](https://docs.python.org/3/library/venv.html)) and activate it before installing pre-commit.
+
+We could technically skip this step, but we might then run into issues on our computer with different Python packages conflicting with each other.
+
+#### Installing and running pre-commit
+
+Once in the Python virtual environment, installing pre-commit is as straightforward as running `python -m pip install pre-commit`.
+
+A normal next step after installing pre-commit is to run a `pre-commit install` to install the git hooks and run tests automatically, but in our case, let’s wait a bit! We will cover this in step 4.
+
+Instead, we can do a `pre-commit run --all-files`, which will run all the tests defined in our configuration file on all the files in our dbt project.
+
+[![Animation showing the output in the Terminal after running the above commands](https://asciinema.org/a/lTmefht77ZEr6kmP7DymaxjRF.svg)](https://asciinema.org/a/lTmefht77ZEr6kmP7DymaxjRF)
+
+In my case, I can see that my model called `customers.sql` has not been added to any YAML file and has no test defined.
+
+In the case of a large project, the number of issues might be much bigger. If we use zsh as our shell, wildcard expansion can be used, and we could run `pre-commit run --files models/mart/*` if we wanted to run all the checks only on the models stored under mart.
+
+### 3. Decide what needs to be fixed immediately
+
+Once we have the list of models that either don’t exist in the YAML files or don’t have any test defined, we can decide if we want to fix all of them at once or not.
+
+What we will see in step 4 is that even if not all models are fixed at once, the CI step and the git hooks can lead to better project hygiene, forcing every model that is being modified to be tested.
+
+In my example above, with just one model to fix, it is easy to create a PR with the changes, but if hundreds of models show up, you might decide to only fix the most important ones at first (your mart, for example) and fix the other ones later on.
+
+### 4. Make it part of the periodic checks
+The last step of our flow is to make those pre-commit checks part of the day-to-day activities, running on the dbt models that are newly created or modified. That way, even if we don’t fix all our models at once, if they get modified at some point, tests will need to be added for the PR to be merged.
+
+Adding periodic pre-commit checks can be done in 2 different ways: through CI (Continuous Integration) actions, or as git hooks when running dbt locally.
+
+#### a) Adding pre-commit-dbt to the CI flow (works for dbt Cloud and dbt CLI users)
+
+The example below will assume GitHub actions as the CI engine, but similar behavior could be achieved in any other CI tool.
+
+As described before, we need to run a `dbt docs generate` in order to create updated artifacts used in the pre-commit hooks.
+
+For that reason, we will need our CI step to execute this command, which will require setting up a `profiles.yml` file providing dbt the information to connect to the data warehouse. Profile files will be different for each data warehouse ([example here](https://docs.getdbt.com/reference/warehouse-profiles/snowflake-profile)).
+
+In our case, let’s create a file called `profiles.yml` at the root of our dbt project, with the following information:
+
+```yaml
+jaffle_shop:
+  target: ci
+  outputs:
+    ci:
+      type: postgres
+      host:
+      user:
+      password: "{{ env_var('DB_PASSWORD') }}"
+      port: 5432
+      dbname:
+      schema: ci
+      threads: 4
+```
+
+We don’t want to save the password of our user in a clear text file. For that purpose, we use the ability to read it from an environment variable. The next step is to save the value of our password as a secret in GitHub. In our GitHub repository, under Settings > Security > Secrets > Action, let’s create a secret called DB_PASSWORD to store our sensitive password.
+
+Finally, we can create a new YAML file to define our GitHub action, e.g. `.github/workflows/pre_commit_checks.yml`. The name is not important, but this file must be saved under the folder `.github/workflows/` (create it if it doesn’t exist yet):
+
+```yaml
+name: pre-commit-check
+
+on:
+  pull_request:
+    branches:
+      - main
+
+jobs:
+  pre-commit-pip:
+    name: Install pre-commit via pip
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: ['ubuntu-latest']
+        python-version: [3.8]
+
+    # Set environment variables used throughout workflow
+    env:
+      DBT_PROFILES_DIR: .
+      DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
+
+    steps:
+
+      - name: Checkout branch
+        uses: actions/checkout@v2
+
+      # Using bash and pip to install dbt and pre-commit
+      # Update the dbt installation command to include the adapter you need
+      - name: Install dbt and pre-commit
+        shell: bash -l {0}
+        run: |
+          python -m pip install dbt-postgres pre-commit
+
+      # This action will output all the files that are being created and modified in our PR
+      - name: Get changed files
+        id: get_file_changes
+        uses: trilom/file-changes-action@v1.2.4
+        with:
+          output: ' '
+
+      # Transforming the output of get_file_changes to a string we can use for our next step
+      # We want to take into account both new files and files that have been modified
+      - name: Get changed .sql files in /models to lint
+        id: get_files_to_lint
+        shell: bash -l {0}
+        run: |
+          # Set the command in the $() brackets as an output to use in later steps
+          echo "::set-output name=files::$(
+          # Issue where grep regular expressions don't work as expected on the
+          # Github Actions shell, check dbt/models/ folder
+          echo \
+          $(echo ${{ steps.get_file_changes.outputs.files_modified }} |
+          tr -s ' ' '\n' |
+          grep -E '^models.*[.]sql$' |
+          tr -s '\n' ' ') \
+          $(echo ${{ steps.get_file_changes.outputs.files_added }} |
+          tr -s ' ' '\n' |
+          grep -E '^models.*[.]sql$' |
+          tr -s '\n' ' ')
+          )"
+
+      # Finally run pre-commit
+      - name: Run pre-commit
+        shell: bash -l {0}
+        run: |
+          pre-commit run --files ${{ steps.get_files_to_lint.outputs.files }}
+```
+
+The code is documented and should be self-explanatory. In a nutshell, we perform the following steps:
+- Mention that this action and all the steps should be run for every PR to main
+- Retrieve the code from our PR
+- Install dbt and pre-commit
+- Identify the files modified in our PR and format them as a list of models separated by spaces
+- Execute a `pre-commit run --files` on the models we just modified or created
+
+Once we push those changes to a custom branch of our repo and create a PR to main, we see the following:
+
+The GitHub action is running:
+
+![Screenshot of a GitHub action executing on a PR that is running the pre-commit-check test](/img/blog/2022-07-26-pre-commit-dbt/testing-running.png)
+
+The step fails because I missed some tests, and it tells me which model is failing:
+
+![Screenshot of the errors logs for the failed pre-commit-check test on the PR shown previously](/img/blog/2022-07-26-pre-commit-dbt/error-logs.png)
+
+The result of the check is also shown in the PR directly:
+
+![Screenshot of the failed test shown directly in the PR "checks" interface](/img/blog/2022-07-26-pre-commit-dbt/checks-failed.png)
+
+
+With that information, I could now go back to dbt, document my model customers, and push those new changes to my repo for another check to be performed.
+
+We could set up rules that prevent any change from being merged if the GitHub action fails. Alternatively, this action step can be defined as merely informational.
+
+#### b) Installing the pre-commit git hooks (for dbt CLI users)
+
+If we develop locally with the dbt CLI, we could also execute `pre-commit install` to install the git hooks. What this means is that every time we want to commit code in git, the pre-commit hooks will run and will prevent us from committing if any step fails.
+
+If we want to commit code without performing all the steps of the pre-hook, we could use the environment variable SKIP or the git flag `--no-verify`, as described [in the documentation](https://pre-commit.com/#temporarily-disabling-hooks). (e.g.
we might want to skip the auto `dbt docs generate` locally to prevent it from running at every commit, and rely on running it manually from time to time.)
+
+And if we install the hooks and then realize that we don’t want them anymore, we just need to delete the folder `.git/hooks/`.
+
+#### c) So tests in CI, or git hooks locally?
+
+Those two configurations are not exclusive but complementary.
+- Having the hooks set up locally ensures that all our models follow our conventions even before pushing them to our repo, ensuring that the CI step will pass correctly.
+- And the GitHub action as a CI test is a great safeguard for people using the dbt Cloud IDE, or for local developers who either have not installed the hooks or tried to push changes with the `--no-verify` flag.
+
+![A meme that reads "Why not both?"](/img/blog/2022-07-26-pre-commit-dbt/why-not-both-meme.png)
+
+## Taking this solution further
+
+We now have a process to ensure that the testing rules we set for our dbt models are enforced through automated steps.
+
+What next? Going back to the diagram at the beginning of this post, we can now think of new rules we want to automate, modifying our pre-commit file and GitHub actions to increase the quality of our dbt project.
+
+It is important, though, to keep a good balance between setting enough rules and automation to ensure a project of good quality, and setting so many of them that they take time away from more value-added work and potentially slow down the overall analytics development process.
+
+![A diagram that adds additional steps to the original diagram shown in the beginning of the article. This solution includes a cyclical step for continuously adding new rules and leveraging a SQL linter.](/img/blog/2022-07-26-pre-commit-dbt/next-strategy.png)
+
+- We could, for example, add SQLFluff as a SQL linter to show us what SQL code is not following the rules that we have defined
+- Or we could add more pre-commit-dbt checks, like `check-model-name-contract`, to make sure that all our model names are following the correct naming convention
+- Or we could add a check on our YAML files to verify if they are all indented correctly
+
+If you have any questions about this process or want to share how you are using pre-commit and CI to increase the quality of your dbt project, feel free to jump into dbt Slack and post in [#i-made-this](https://getdbt.slack.com/archives/C01NH3F2E05) or [#dbt-deployment-and-orchestration](https://getdbt.slack.com/archives/CMZ2Q9MA9)!
diff --git a/website/blog/2022-07-27-understanding-the-components-of-the-dbt-semantic-layer.md b/website/blog/2022-07-27-understanding-the-components-of-the-dbt-semantic-layer.md
new file mode 100644
index 00000000000..3db07f6f26a
--- /dev/null
+++ b/website/blog/2022-07-27-understanding-the-components-of-the-dbt-semantic-layer.md
@@ -0,0 +1,173 @@
+---
+title: "Understanding the components of the dbt Semantic Layer"
+description: "Heard about dbt Metrics or the dbt Semantic Layer and curious to give them a try? Callum McCann digs into what they are, walks through an example, and discusses how they all fit together!"
+slug: understanding-the-components-of-the-dbt-semantic-layer
+
+authors: [callum_mccann]
+
+tags: [dbt product updates]
+hide_table_of_contents: false
+
+date: 2022-07-27
+is_featured: true
+---
+
+# Getting started with the dbt Semantic Layer
+
+> TLDR: The Semantic Layer is made up of a combination of open-source and SaaS offerings and is going to change how your team defines and consumes metrics.
+
+At last year's Coalesce, Drew showed us the future[^1] - a vision of what metrics in dbt could look like. Since then, we've been getting the infrastructure in place to make that vision a reality. We wanted to share with you where we are today and how it fits into the broader picture of [where we're going](https://www.getdbt.com/blog/dbt-semantic-layer).
+
+To those who haven't followed this saga with the intensity of [someone watching their investments on the crypto market](https://mobile.twitter.com/scannergr1/status/1536198701215109122/photo/1), we're rolling out this new resource to help you better understand the dbt Semantic Layer and provide clarification on the following things:
+
+1. What is the dbt Semantic Layer?
+2. How do I use it?
+3. What is publicly available now?
+4. What is still in development?
+
+With that, let's get into it!
+
+
+
+> Some of you might have been around when this was initially being referred to as the Metrics Layer. As we evaluated the long-term plans for what this part of dbt was going to become, we realized that naming it the Semantic Layer better reflected its capabilities and where we plan on taking it.
+
+## What is the dbt Semantic Layer?
+
+The dbt Semantic Layer is a new part of dbt to help improve precision and consistency while expanding flexibility and capability in the modern data stack. Our maestro of metrics, Drew Banin, [released a blog post detailing the vision of where we're going here](https://www.getdbt.com/blog/dbt-semantic-layer). The first use case that we are addressing is one that most practitioners **and** stakeholders are familiar with - metrics. We'll walk through what this looks like in practice later on in this post.
+
+Under the hood, the dbt Semantic Layer is a collection of several components - some of these are part of dbt Core, some part of dbt Cloud, and some are net new functionality. They all [combine together like Voltron](https://www.youtube.com/watch?v=5rPSLQxMT8w) to create a single experience through which business users can query data in the context of the metric that is most familiar to them. And the best part is that they can do it in systems they are already comfortable using.
+
+***What will this look like for my data consumers and business stakeholders?***
+
+Ultimately, this looks like people being able to interact with trusted datasets in the tools that they are comfortable with (and eventually new tools designed specifically around metrics).
+
+An example that we’ve found helpful is [ARR](https://www.zuora.com/billing-topics/annual-recurring-revenue/#:~:text=Annual%20Recurring%20Revenue%2C%20or%20ARR,for%20a%20single%20calendar%20year). A business-critical metric to SaaS companies, ARR can be a tricky calculation to keep consistent across all of the tools used in the business. With the dbt Semantic Layer, this definition would live in dbt, and the logic to create the dataset for that metric would be consistent across all different consuming experiences. Best of all, definition changes would get reflected in downstream tools, so you no longer need to manually search for and update every downstream dependency. Callum of 3 years ago is jumping with joy.
+
+***That’s good and all, but what does this look like for practitioners to use?***
+
+The dbt Semantic Layer is composed of the following components[^2]:
+
+**Available Today**
+
+- **[`metric` node in dbt Core:](https://docs.getdbt.com/docs/building-a-dbt-project/metrics)** Similar to `models` or `sources`, this is a specific node type in dbt Core.
It is the definition of a time-series aggregation over a table that supports zero or more dimensions. The resulting node is stored in the `manifest.json`, just like `models`, and referenced in the DAG.
+- **[`dbt_metrics` package:](https://github.com/dbt-labs/dbt_metrics)** this package provides macros that combine the version-controlled metric definition and query-time parameters (like dimensions, a time grain, and secondary calculations) to generate a SQL query which calculates the metric value.
+- **[dbt Cloud Metadata API](https://docs.getdbt.com/docs/dbt-cloud-apis/metadata-api):** a GraphQL API which supports arbitrary queries over the metadata produced by dbt Cloud jobs. Contains metadata related to the accuracy, recency, configuration, and structure of the views and tables in the warehouse, as well as much more.
+
+**New**
+
+- **dbt Server:** this component wraps dbt Core in a persistent server that is responsible for handling RESTful API requests for dbt operations. It’s a thin interface that is primarily responsible for performance and reliability in production environments.
+- **dbt Cloud proxy server:** this component enables dbt Cloud to dynamically rewrite requests to a data warehouse and compile dbt-SQL into raw SQL that the database understands. It then returns the dataset produced by the raw SQL to the platform that sent it.
+
+![Untitled](/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/semantic-layer-description.png)
+
+### Understanding how and when to use metrics
+
+> Use of metrics and the metrics package is recommended for experienced dbt users and early adopters who want to explore this functionality.
+
+Let's walk through an example of how you can use the components above to get started today using our old friend - [the Jaffle Shop](https://github.com/dbt-labs/jaffle_shop_metrics). We'll take a look at how you can start defining and testing metrics today, as well as how you'll interact with them once the new components are released.
+
+**When to use metrics**
+
+The first question you need to ask is, *Should we be using metrics?*
+
+It is our belief that metrics are not a one-size-fits-all solution. They are designed for core business metrics where consistency and precision are of key importance, not for exploratory use cases or ad hoc analysis. Our shorthand way of determining whether the metric should be defined in dbt has been - *is this something our teams need to report on?*
+
+So, let’s say the CFO of our Jaffle comes to us on a Monday morning and commands the data team to overhaul how we're reporting on Revenue. Our Regional Manager Jim and Sales Director Pam[^3] have been giving him different reports! Right now it's a mess of tools and inconsistencies - Jim’s numbers are defined in Tableau and say one thing, Pam’s within Hex and say another! The CFO is frustrated with it and wants a cohesive experience across the company, where everyone has the same numbers for revenue. It passes the report test, it’s an important business metric; away we go!
+
+**Defining the metric with the `metric` node**
+
+In this example, we’ll say that both Jim and Pam are pulling from a table created by dbt called `orders`. It currently contains the field `amount` as well as amounts for all the different methods of payment, such as credit cards or gift cards. Jim has been calculating revenue by summing up the `credit_card_amount` and `gift_card_amount` fields, as he forgot to update his definition when the business added coupon and bank transfer payments.
Meanwhile, Pam is correctly summing the `amount` field but hasn’t accounted for return orders that shouldn’t be counted!
+
+The first step is creating a unified definition for what revenue is. In order to do this, we will create the following yml definition within our dbt repo:
+
+```yaml
+version: 2
+
+metrics:
+  - name: revenue
+    label: Revenue
+    model: ref('orders')
+    description: "The total revenue of our jaffle business"
+
+    type: sum
+    sql: amount
+
+    timestamp: order_date
+    time_grains: [day, week, month, year]
+
+    dimensions:
+      - customer_status
+      - has_coupon_payment
+      - has_bank_transfer_payment
+      - has_credit_card_payment
+      - has_gift_card_payment
+
+    filters:
+      - field: status
+        operator: '='
+        value: "'completed'"
+```
+
+This metric has now been defined in the dbt metadata and can be seen in the DAG!
+
+![Untitled](/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/metrics-dag.png)
+
+**Running the metrics package to calculate the metric**
+
+In order to ensure that both Jim and Pam are retrieving the same numbers for their metric, we’ll need them both to run a metrics `calculate` query. In this example, we’re not interested in the specific payment types and only want to see revenue broken up by `week` and `customer_status`.
+
+```sql
+select *
+from {{ metrics.calculate(
+    metric('revenue'),
+    grain='week',
+    dimensions=['customer_status']
+) }}
+```
+This would return a dataset that looks like this:
+
+| date_week | customer_status | revenue |
+| --- | --- | --- |
+| 2018-01-01 | Churn Risk | 43 |
+| 2018-01-01 | Churned | 0 |
+| 2018-01-01 | Healthy | 26 |
+| 2018-01-08 | Churn Risk | 27 |
+
+Jim and Pam would then be able to reference the `revenue` column within the newly created dataset and never have to worry about the calculation of revenue ever again[^4]! The world is perfect and [balance has been restored.](https://www.youtube.com/watch?v=d1EnW4kn1kg)
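+
+If Jim later wants, say, a week-over-week change alongside the raw number, the same query can be extended with the secondary calculations mentioned above. A rough sketch using the package's `period_over_period` helper (the `wow_change` alias is illustrative):
+
+```sql
+select *
+from {{ metrics.calculate(
+    metric('revenue'),
+    grain='week',
+    dimensions=['customer_status'],
+    secondary_calculations=[
+        metrics.period_over_period(comparison_strategy="difference", interval=1, alias="wow_change")
+    ]
+) }}
+```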
+
+**In the near future with dbt Server**
+
+When dbt Server releases later this year, the flow of how metrics are consumed will change significantly. Your organization will no longer need to materialize each metric within a model in order to take advantage of the metric definition. Instead, you’ll be able to directly query dbt Server with the metric code provided and have the correct dataset returned to your BI tool of choice.
+
+Additionally, integration partners will have built out experiences around metrics, using the Metadata API to create unique and creative ways for consumers to obtain metric data while abstracting away complexity. For example, a box that allows the user to select from a list of metrics, time grains, dimensions, and secondary calculations, and then have the correct information returned to them regardless of the selection!
+
+### So what is publicly available now?
+
+Right now, the two main open-source components that are publicly available are the [`metric` node](https://docs.getdbt.com/docs/building-a-dbt-project/metrics) within dbt Core and the `dbt_metrics` package. Combined, these two can provide an introductory semantic layer experience by allowing analytics engineers to define metrics and then query them via the metrics package.
+
+These two components are a static experience that has to be defined in the dbt project (as the selected dimensions are defined at model creation), but they are useful for those who want to ensure that metrics remain consistent across every BI tool. If you identify with any of the following conditions, you could be a good fit for implementing this as it exists today:
+
+- You want to prepare your organization for the full Semantic Layer launch.
+- Your organization has at least a few key metrics.
+- Your organization uses 1 or more BI tools.
+- Your organization occasionally has issues around different metric calculations.
+- Your organization wants a centralized location for all metrics, so everyone in the business knows where to look.
+
+All of these are great reasons to begin exploring implementing metrics in your dbt project! If you’re curious about what an implementation of this might look like, we recommend referencing the [jaffle_shop_metrics](https://github.com/dbt-labs/jaffle_shop_metrics) repo!
+
+### What is still in development?
+
+Both the dbt Cloud proxy server and dbt Server are currently in development and scheduled for release later this year. If you’re curious about testing them once they are released, we recommend keeping an eye on our product announcements and reaching out once they become publicly available!
+
+### What if I have questions?
+
+If you have any questions about those components, or metrics in general, please feel free to post in the #dbt-metrics-and-server channel on dbt Slack! I hang around there and am always willing to chat metrics!
+
+### Footnotes
+[^1]: That future may not have mentioned robots, but I'm holding out for a [Jetsons-style morning machine](https://www.youtube.com/watch?v=-0S3Jf-NxdI) to help me get ready in the morning.
+
+[^2]: We’re specifically calling out the licensing because there is a lot of confusion in the community around what is open source and what isn’t. This is only becoming trickier with the introduction of the BSL licensing, which ensures users can run their own server but it cannot be sold as a cloud service. For more information on why these licensing types were picked, we recommend [Tristan’s blog around licensing dbt](https://www.getdbt.com/blog/licensing-dbt/). The big takeaway around licensing is that you can still run components of the dbt Semantic Layer even if you aren’t a dbt Cloud customer!
+
+[^3]: Full transparency: I've never seen the Office. The awkward humor makes me so uncomfortable that I have to turn off the TV. Apologies if the titles of the characters are incorrect.
+
+[^4]: Psych! They’re definitely interested in the calculation of ARR. In fact, they don’t really trust the numbers **unless** they understand how it’s calculated. This is where they could use the Metadata API in order to query all the information about the metric, such as definition, run-time, acceptable dimensions, etc. Right now Jim and Pam would need to query the API directly, but in the future we expect there to be a number of different ways to obtain this information, ranging from [direct integration with the BI tool](https://learn.hex.tech/docs/connect-to-data/data-connections/dbt-integration) all the way to having that information materialized in a dbt information schema! *For current tabular alternatives, there are some interesting macros in the newly released [dbt-project-evaluator package](https://github.com/dbt-labs/dbt-project-evaluator).
Take a look there if you’re curious about materializing your metric information!*
\ No newline at end of file
diff --git a/website/blog/2022-08-12-how-we-shaved-90-minutes-off-long-running-model.md b/website/blog/2022-08-12-how-we-shaved-90-minutes-off-long-running-model.md
new file mode 100644
index 00000000000..09cb9c4c178
--- /dev/null
+++ b/website/blog/2022-08-12-how-we-shaved-90-minutes-off-long-running-model.md
@@ -0,0 +1,293 @@
+---
+title: "How we shaved 90 minutes off our longest running model"
+description: "Monitoring large, complex projects can be difficult. When you're running 1,000+ models in a day, how do you know which of those consistently take the longest to run? In this article, Bennie Regenold and Barr Yaron show the benefits of the Model Timing tab in dbt Cloud."
+slug: how-we-shaved-90-minutes-off-model
+authors: [bennie_regenold, barr_yaron]
+tags: [analytics craft]
+hide_table_of_contents: false
+
+date: 2022-08-18
+is_featured: true
+---
+
+When running a job that has over 1,700 models, how do you know what a “good” runtime is? If the total process takes 3 hours, is that fantastic or terrible? While there are many possible answers depending on dataset size, complexity of modeling, and historical run times, the crux of the matter is normally “did you hit your SLAs?” However, in the cloud computing world where bills are based on usage, the question is really “did you hit your SLAs _and stay within budget_?”
+
+Here at dbt Labs, we used the Model Timing tab in our internal analytics dbt project to help us identify inefficiencies in our incremental dbt Cloud job that eventually led to major financial savings, and a path forward for periodic improvement checks.
+
+
+
+## Your new best friend: The Model Timing tab
+
+The dbt Labs internal project is a beast! Our incremental dbt Cloud job runs 4x/day and invokes over 1,700 models. We like to sift through our dbt Cloud jobs using the [Model Timing](https://docs.getdbt.com/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab) tab in dbt Cloud. The `Model Timing` dashboard displays the model composition, order, and run time for every job run in dbt Cloud (for team and enterprise plans). The top 1% of model durations are automatically highlighted, which makes it easy to find bottlenecks in our runs. You can see that our longest running model stuck out like a sore thumb -- here's an example of our incremental job before a fix was applied:
+
+![Model Timing tab before picture](/img/blog/2022-08-12-model-timing/model_timing_before.png)
+
+As you can see, it's straightforward to identify the model that's causing the long run times and holding up other models. The model `fct_dbt_invocations` takes, on average, 1.5 hours to run. This isn't surprising, given that it's a relatively large dataset (~5B records) and that we're performing several intense SQL calculations. Additionally, this model calls an [ephemeral model](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/materializations#ephemeral) named `dbt_model_summary` that also does some heavy lifting. Still, we decided to explore whether we could refactor this model and make it faster.
+
+After refactoring this code, we ended up swapping the ephemeral model `dbt_model_summary` to an incremental model that took the bulk of the processing out of the main `fct_dbt_invocations` model. Instead of recalculating this complex logic every run, we pull only new data and run that logic on the smaller subset of those records.
The combined run time of the new `dbt_model_summary` and `fct_dbt_invocations` is now ~15-20 minutes, a savings of over an hour per run! + +![Model Timing tab after picture](/img/blog/2022-08-12-model-timing/model_timing_after.png) + +## Identifying the problem + +This project runs on Snowflake, so all the examples below show the Snowflake UI. However, it is possible to do a similar style of analysis in any data warehouse. + +Also, this blog post represents a pretty technical deep dive. If everything you read here doesn't line up immediately, that's ok! We recommend reading through this article, then brushing up on cloud data warehouses and query optimization to help supplement the learnings here. + +### Unpacking the query plan + +Finding this long running query was step one. Since it was so dominant in the Model Timing tab, it was easy to go straight to the problematic model and start looking for ways to improve it. The next step was to check out what the Snowflake query plan looked like. + +There are a few ways you can do this: either find the executed query in the `History` tab of the Snowflake UI, or grab the compiled code from dbt and run it in a worksheet. As it’s running, you can click on the `Query ID` link to see the plan. More details on this process are available in your provider’s documentation ([Snowflake](https://docs.snowflake.com/en/user-guide/ui-query-profile.html), [BigQuery](https://cloud.google.com/bigquery/docs/query-plan-explanation), [Redshift](https://docs.aws.amazon.com/redshift/latest/dg/c-the-query-plan.html), [Databricks](https://docs.databricks.com/sql/admin/query-profile.html)). + +Below you can see the query plan for `fct_dbt_invocations`, which includes the logic from `dbt_model_summary`: + +![Snowflake query plan](/img/blog/2022-08-12-model-timing/snowflake_query_plan.png) + +From the query profile, it was easy to find the issue. There are two window functions that account for over 90% of the run time when we factor in the table scan needed to retrieve the data. Additionally, there is nearly 1TB worth of data that is [spilled to remote storage](https://github.com/dbt-labs/docs.getdbt.com/discussions/1550) as part of this query. Within Snowflake, [remote storage is considerably slower](https://docs.snowflake.com/en/user-guide/ui-query-profile.html?_ga=2.162889724.1914632094.1659383329-1610273913.1651521575&_gac=1.229438062.1658341107.Cj0KCQjwz96WBhC8ARIsAATR2516I_11uMuOf0cXXe0zlyOBUXLap5CfVvKnpGwTb3bLqC5tHwlurxcaAskEEALw_wcB#queries-too-large-to-fit-in-memory:~:text=This%20spilling%20can%20have%20a%20profound%20effect%20on%20query%20performance%20(especially%20if%20remote%20disk%20is%20used%20for%20spilling).%20To%20alleviate%20this%2C%20we%20recommend%3A) to both write and read from, so any data that’s on the remote drive will really slow down a query. We’ve found the problem! + +### Understanding the data + +Once we identified the issue, we had to find a way to fix it. + +First, it’s good to have a high level understanding of the underlying data for `fct_dbt_invocations`. Any time you issue a command to dbt (run, test, build, snapshot, etc.), we track certain pieces of metadata about that run. We call these “invocations,” and as you can imagine, dbt is invoked *a lot*. The table this query is running against is filtered, but still has somewhere in the neighborhood of 5 billion rows. 
The relevant pieces of data that we are using in this query include project IDs, model IDs, and an anonymized hash key representing the raw model contents, which lets us know if a model changed.
+
+If you’re curious, here’s a look at the query for `dbt_model_summary` before any changes were made:
+
+```sql
+{{config(materialized = 'ephemeral')}}
+
+with model_execution as (
+
+    select * from {{ ref('stg_dbt_run_model_events') }}
+
+),
+
+diffed as (
+
+    select *,
+
+        row_number() over (
+            partition by project_id, model_id
+            order by dvce_created_tstamp
+        ) = 1 as is_new,
+
+        /*
+        The `mode` window function returns the most common content hash for a
+        given model on a given day. We use this as a proxy for the 'production'
+        version of the model, running in deployment. When a different hash
+        is run, it likely reflects that the model is undergoing development.
+        */
+
+        contents != mode(contents) over (
+            partition by project_id, model_id, dvce_created_tstamp::date
+        ) as is_changed
+
+    from model_execution
+
+),
+
+final as (
+
+    select
+        invocation_id,
+        max(model_complexity) as model_complexity,
+        max(model_total) as count_models,
+        sum(case when is_new or is_changed then 1 else 0 end) as count_changed,
+        sum(case when skipped = true then 1 else 0 end) as count_skip,
+        sum(case when error is null or error = 'false' then 0 else 1 end) as count_error,
+        sum(case when (error is null or error = 'false') and skipped = false then 1 else 0 end) as count_succeed
+
+    from diffed
+    group by 1
+
+)
+
+select * from final
+```
+
+The window functions referenced above are answering the following questions:
+
+- `row_number()`
+    - *Is this the first time that this specific model has run in a project?*
+    - Note: this grain is at the project level
+- `mode()`
+    - *Is this the most frequent version of the model that ran today (based on the hashed contents)?*
+    - Note: this grain is at the model + run date level
+
+## Moving to solutions
+
+### Attempt #1: Optimizing our objects and materializations
+
+Given the size and complexity of this query, the first few approaches we took didn’t focus on changing the query as much as optimizing our objects and materializations.
+
+The two window functions (`row_number()` and `mode()` in the `diffed` CTE above) were in an [ephemeral model](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/materializations#ephemeral), which isn’t stored in the data warehouse but is instead executed in-memory at run time. Since it was obvious our virtual warehouse was running out of memory (remote storage spillage), we tried swapping that to a view, then a table materialization. Neither of these improved the run time significantly, so we tried clustering the table. However, since our two window functions are at different grains, there wasn’t a great clustering key we could find for this.
+
+### Attempt #2: Moving to an incremental model
+
+The final strategy we tried, which ended up being the solution we implemented, was to swap the ephemeral model (`dbt_model_summary`) to an [incremental model](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/configuring-incremental-models). Since we’re calculating metrics based on historical events (**first** model run, most frequent model run **today**), an incremental model let us perform the calculation for all of history once in an initial build; then every subsequent build only needs to look at a much smaller subset of the data to run its calculations.
+
+One of the biggest problems with the ephemeral model was remote spillage due to lack of memory, so having a smaller dataset to run the calculation against made a massive impact. Snowflake can easily calculate a daily mode or a first model run when we only have to look at a sliver of the data each time.
+
+Swapping from ephemeral to incremental can be simple, but in this case we are calculating at two grains and need more than just the data loaded since the prior run.
+
+- `row_number()`
+    - To get the first time a model was run, we need every invocation of that model to see if this is the first one. Still, we don’t need the full history, just the subset that changed today. This is handled in the `new_models` CTE you can see below.
+- `mode()`
+    - Since we’re calculating a daily mode, we actually need the full day’s worth of data every time this incremental model runs. We do that by applying the `::date` cast to our incremental logic to always pull a full day's (or multiple days') worth of history each time.
+
+This led to slightly more complex logic in the model, as you can see below:
+
+```sql
+{{config(materialized = 'incremental', unique_key = 'invocation_id')}}
+
+with model_execution as (
+
+    select *
+    from {{ ref('stg_dbt_run_model_events') }}
+    where
+        1=1
+        {% if target.name == 'dev' %}
+
+            and collector_tstamp >= dateadd(d, -{{var('testing_days_of_data')}}, current_date)
+
+        {% elif is_incremental() %}
+
+            --incremental runs re-process a full day every time to get an accurate mode below
+            and collector_tstamp > (select max(max_collector_tstamp)::date from {{ this }})
+
+        {% endif %}
+
+),
+
+{# When running a full refresh we have access to all records, so this logic isn't needed #}
+{% if is_incremental() %}
+new_models as (
+
+    select
+        project_id,
+        model_id,
+        invocation_id,
+        dvce_created_tstamp,
+        true as is_new
+    from {{ ref('stg_dbt_run_model_events') }} as base_table
+    where
+        exists (
+            select 1
+            from model_execution
+            where
+                base_table.project_id = model_execution.project_id
+                and base_table.model_id = model_execution.model_id
+        )
+    qualify
+        row_number() over(partition by project_id, model_id order by dvce_created_tstamp) = 1
+
+
+),
+{% endif %}
+
+diffed as (
+
+    select model_execution.*,
+
+        {% if is_incremental() %}
+
+            new_models.is_new,
+
+        {% else %}
+
+            row_number() over (
+                partition by project_id, model_id
+                order by dvce_created_tstamp
+            ) = 1 as is_new,
+
+        {% endif %}
+
+        /*
+        The `mode` window function returns the most common content hash for a
+        given model on a given day. We use this as a proxy for the 'production'
+        version of the model, running in deployment. When a different hash
+        is run, it likely reflects that the model is undergoing development.
+        */
+
+        model_execution.contents != mode(model_execution.contents) over (
+            partition by model_execution.project_id, model_execution.model_id, model_execution.dvce_created_tstamp::date
+        ) as is_changed
+
+    from model_execution
+    {% if is_incremental() %}
+        left join new_models on
+            model_execution.project_id = new_models.project_id
+            and model_execution.model_id = new_models.model_id
+            and model_execution.invocation_id = new_models.invocation_id
+            and model_execution.dvce_created_tstamp = new_models.dvce_created_tstamp
+    {% endif %}
+
+),
+
+final as (
+
+    select
+        invocation_id,
+        max(collector_tstamp) as max_collector_tstamp,
+        max(model_complexity) as model_complexity,
+        max(model_total) as count_models,
+        sum(case when is_new or is_changed then 1 else 0 end) as count_changed,
+        sum(case when skipped = true then 1 else 0 end) as count_skip,
+        sum(case when error is null or error = 'false' then 0 else 1 end) as count_error,
+        sum(case when (error is null or error = 'false') and skipped = false then 1 else 0 end) as count_succeed
+
+    from diffed
+    group by 1
+
+)
+
+select * from final
+```
+
+The astute reader will notice that the entire `new_models` CTE is wrapped in an `{% if is_incremental() %}` block. That’s because when the model is run incrementally, we need the full history of model runs for the given model. This means we have to join back to the main table to get that full history. However, when we’re running this as a full refresh (or on the initial load), we already have the full history of runs in the query, so we don’t need to join back to the table. This additional piece of `{% if is_incremental() %}` logic dropped the full refresh run time down from over 2 hours to just under 30 minutes. This is a one-time savings (or however often we have to full refresh), but is well worth the slightly more complex logic.
+
+## Difficulty with testing
+
+A major challenge in testing and implementing our changes was the volume of data needed for comparison testing. Again, the biggest problem we had was that our virtual warehouse was running out of memory, so trying to do performance testing on a subset of the data had misleading results (our testing was a subset of 10 million records). Since this query runs just fine on a small set of data (think the incremental runs), when we were initially trying to performance test the new vs. the old model, it looked like there was no real benefit to the incremental model. This led to many wasted hours of trying to figure out why we weren’t seeing an improvement.
+
+Eventually, we figured out that we needed to test this on the full dataset to see the impact. In the cloud warehousing world where you pay for use, this has very easy-to-track cost implications. However, you have to spend money to make money, so we decided the increased cost associated with testing this on the full dataset was worth the expense.
+
+To start with, we [cloned](https://docs.snowflake.com/en/sql-reference/sql/create-clone.html) the entire prod schema to a testing schema, which is a free operation in Snowflake. Then, we did an initial build of the new `dbt_model_summary` model, since it was switching from ephemeral to incremental. Once that was complete, we were able to delete a few days' worth of data from both `dbt_model_summary` and `fct_dbt_invocations` to see how long an incremental run would take. This represented the true day-to-day runs, and the results were fantastic! The combined run time of both models dropped from 1.5 hours to 15-20 minutes for incremental runs.
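+
+In Snowflake terms, that testing setup is only a few statements. A rough sketch, with hypothetical schema names (`analytics` for prod, `analytics_test` for the scratch copy) and an arbitrary 3-day window:
+
+```sql
+-- Zero-copy clone: free until the test schema diverges from prod
+create schema analytics_test clone analytics;
+
+-- Simulate the day-to-day state by removing the most recent days of data,
+-- then run the incremental models against analytics_test and time them
+delete from analytics_test.dbt_model_summary
+where max_collector_tstamp >= dateadd(day, -3, current_date);
+```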
+
+## Benefits of the improvement
+
+The end result of this improvement saves a nice chunk of change. Since this query was running 4 times per day and took 1.5 hours per run, this change is saving roughly 5 hours per day in run time. Given that this is on Snowflake, we can calculate the savings based on their public pricing. Currently, the Enterprise edition of Snowflake costs $3/credit, and a medium warehouse consumes 4 credits/hour. Putting this all together, that’s a savings of ~$1800/month.
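+
+For transparency, here's the back-of-the-envelope math behind that number (list price and warehouse size assumed as stated above):
+
+```
+5 hours/day saved x 4 credits/hour x $3/credit = $60/day ~= $1,800/month (30 days)
+```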
Also, keep in mind that a developer’s time and energy are a cost as well, so going after a handful of big-hitting items less frequently may be better than constantly rewriting code for incremental gains. diff --git a/website/blog/2022-08-17-managing-surrogate-keys-in-dbt.md b/website/blog/2022-08-17-managing-surrogate-keys-in-dbt.md new file mode 100644 index 00000000000..cae25bc4b96 --- /dev/null +++ b/website/blog/2022-08-17-managing-surrogate-keys-in-dbt.md @@ -0,0 +1,230 @@ +--- +title: "Surrogate keys in dbt: Integers or hashes?" +description: "Wondering how to build a data model with surrogate keys? Dave Connors walks you through two strategies." +slug: managing-surrogate-keys + +authors: [dave_connors] + +tags: [analytics craft] +hide_table_of_contents: false + +date: 2022-08-24 +is_featured: true +--- + +Those who have been building data warehouses for a long time have undoubtedly encountered the challenge of building surrogate keys on their data models. Having a column that uniquely represents each entity helps ensure your data model is complete, does not contain duplicates, and able to join across different data models in your warehouse. + +Sometimes, we are lucky enough to have data sources with these keys built right in — Shopify data synced via their API, for example, has easy-to-use keys on all the tables written to your warehouse. If this is not the case, or if you build a data model with a compound key (aka the data is unique across multiple dimensions), you will have to rely on some strategy for creating and maintaining these keys yourself. How can you do this with dbt? Let’s dive in. + + + +## How were surrogate keys managed in the past? + +Before the advent of the analytical warehouse tools we use today, the data warehouse architecture had a few key constraints that led to the rise of the Kimball-style warehouse with a snowflake schema. This was because storage was expensive — it was more efficient to store data as few times as possible, and rely on joins to connect data tog ether when a report required it. And to make those joins efficient, it became standard practice to use **monotonically increasing integer surrogate keys (MIISKs)**, a fancy way to say “count each record starting at one” so that your data model would look something like this (you are a cheesemonger): + +| product_id | product_name | created_by | created_at | +| --- | --- | --- | --- | +| 1 | mozzarella | 23 | 0001-05-05 | +| 2 | cheddar | 24 | 1150-02-03 | +| 3 | gruyere | 25 | 1655-04-03 | + +| order_line_id | order_id | product_id | amount | created_at | +| --- | --- | --- | --- | --- | +| 1 | 1 | 3 | 40 | 2022-07-01 | +| 2 | 2 | 1 | 50 | 2022-07-05 | +| 3 | 3 | 1 | 10 | 2022-07-07 | +| 4 | 3 | 2 | 30 | 2022-07-07 | + +| order_id | customer_id | created_at | +| --- | --- | --- | +| 1 | 5 | 2022-07-01 | +| 2 | 8 | 2022-07-05 | +| 3 | 10 | 2022-07-07 | + +There are some clear benefits here! + +- There are clear, intuitive relationships between these entities! +- The fact that the keys here are small integers, the database can a) not worry about storage costs for this data b) index this field easily, making joins quick and efficient. + +However, there are also some clear maintenance issues here. Making updates to, say, your products table will require some careful surgical work to ensure the association of cheddar to id 2 is never accidentally changed. You may have heard of the phrase “load your dims before your facts” — this refers to the careful work required to maintain this referential integrity. 
Additionally, you need to know about the *exact state of the data* before making any updates. This data is **stateful**, making it rigid and more difficult to work with should there be any losses to this data. Imagine trying to rebuild these relationships from scratch!
+
+## MIISKs in dbt
+
+If this is your preferred modeling approach, dbt can absolutely support this workflow! This will likely require you to take advantage of built-in warehouse functionality to generate these MIISKs — in Snowflake, we can use [sequences](https://docs.snowflake.com/en/user-guide/querying-sequences.html), which are objects built exactly for this purpose. We’ll use Snowflake as the example here, but this approach can likely be adapted for other warehouses as well.
+
+### Creating and maintaining sequences
+
+In order to properly maintain the sequence of the surrogate keys in your data models, we’ll need to build and maintain a sequence for each table that needs one. In order to do this at scale, we’ll make use of the [meta](https://docs.getdbt.com/reference/resource-configs/meta) config of a dbt model. This configuration allows you to define any metadata dictionary that you want. Using this, we can programmatically apply a surrogate key configuration for each model that needs one, and reference that configuration in a macro to properly create and update surrogate keys when necessary.
+
+Here’s an example configuration:
+
+```yaml
+# assign the surrogate key config to your model
+
+version: 2
+
+models:
+  - name: dim_customers
+    description: all customers
+    config:
+      meta:
+        surrogate_key: true
+```
+
+This metadata can then be leveraged in a macro in an `on-run-start` operation to ensure all sequences exist for all models that need one before the models execute.
+
+```sql
+-- in macros/generate_sequences.sql
+
+{% macro generate_sequences() %}
+
+  {% if execute %}
+
+    {% set models = graph.nodes.values() | selectattr('resource_type', 'eq', 'model') %}
+    {# parse through the graph object, find all models with the meta surrogate key config #}
+    {% set sk_models = [] %}
+    {% for model in models %}
+      {% if model.config.meta.surrogate_key %}
+        {% do sk_models.append(model) %}
+      {% endif %}
+    {% endfor %}
+
+  {% endif %}
+
+  {% for model in sk_models %}
+
+    {% if flags.FULL_REFRESH or model.config.materialized == 'table' %}
+    {# regenerate sequences if necessary #}
+
+      create or replace sequence {{ model.database }}.{{ model.schema }}.{{ model.name }}_seq;
+
+    {% else %}
+    {# create only if not exists for incremental models #}
+
+      create sequence if not exists {{ model.database }}.{{ model.schema }}.{{ model.name }}_seq;
+
+    {% endif %}
+
+  {% endfor %}
+
+{% endmacro %}
+
+```
+
+You can see in the above macro that we’re baking in a naming convention here — for any model, the sequence will exist in the same database and schema as the model and follow the naming convention `<model_name>_seq`. Adhering to this pattern allows us to also create an easy macro to increment the sequences in our model definitions without having to hard code the sequence name in every model that needs a surrogate key.
+
+```sql
+-- in macros/increment_sequence.sql
+
+{%- macro increment_sequence() -%}
+
+  {{ this.database }}.{{ this.schema }}.{{ this.name }}_seq.nextval
+
+{%- endmacro -%}
+```
+
+So your model code looks like:
+
+```sql
+-- in models/dim_customers.sql
+...
+
+with cte_name as (
+    ...
+)
+...
+
+select
+
+    {{ increment_sequence() }} as customer_id,
+    first_name,
+    last_name
+
+from cte_name
+
+...
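+
+-- note: for a model named dim_customers built into analytics.prod, the
+-- increment_sequence() call above compiles to analytics.prod.dim_customers_seq.nextval
+-- (database and schema names here are illustrative)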
+```
+
+### Caveats
+
+Despite the relative simplicity of this strategy, there are a handful of drawbacks with regard to making sure these sequences work the way we want them to.
+
+- **dbt Run errors -** If an incremental model that has surrogate keys maintained in this way *fails* due to some SQL error, we may end up with gaps in our surrogate key. When dbt goes to execute the model, the sequence is queried, and therefore incremented, but the model failure prevents changes to the target table model. That means the next time we run it, the incremental model will start on the wrong value, and we may end up with a column that looks like this:
+
+    | surrogate_key_id |
+    | --- |
+    | 1 |
+    | 2 |
+    | 3 |
+    | 4 |
+    | 5 |
+    | 8 |
+
+    In fact, most cloud platforms [can’t guarantee](https://docs.snowflake.com/en/user-guide/querying-sequences.html#:~:text=Snowflake%20does%20not%20guarantee%20generating%20sequence%20numbers%20without%20gaps.%20The%20generated%20numbers%20consistently%20increase%20in%20value%20(or%20decrease%20in%20value%20if%20the%20step%20size%20is%20negative)%20but%20are%20not%20necessarily%20contiguous) that sequences will be generated without gaps because of their use of parallel processing, even if we *don’t* have a dbt run error — because queries will be spread across multiple compute clusters, each step might query the sequence at different times, which makes it possible to have an out of order sequence result. This is a major consideration in using sequences — if that’s a deal breaker, you may need additional SQL logic in your models (like a `row_number()` function) to guarantee your keys are monotonically increasing.
+
+- **Views -** Because sequences in Snowflake increment on every query, using them as the surrogate keys for views would mean every time the view is queried, the sequence would increment and therefore change. This strategy would only work for table or incremental models.
+- **Ordering -** Since sequences will be regenerated on every run for tables, and every time an incremental model is regenerated, the order of the resulting query determines which records get assigned to each key. In order to maintain referential integrity (i.e. product_id 1 always means mozzarella), you need to build `ORDER BY` statements into your models. This can cause adverse performance during table builds.
+- **“Load your dims before your facts” -** This strategy can also lead to some very messy DAGs in order to keep relationships intact in your project. As mentioned above, it’s imperative that each product record results in the same surrogate key value every time dbt is run. Additionally, this means that any table that needs to read from this table needs to run downstream of that initial process. This can lead to bottlenecks at runtime.
+
+Even though configuring MIISKs with sequences can be pretty well automated, it’s a bit of a brittle process that relies on a lot of assumptions and requires a whole lot of bandwidth from the data team to recreate the warehouse should anything go haywire.
+
+## Hashed surrogate keys
+
+An alternative to using the traditional MIISK strategy is to use cryptographic hashing functions to *derive the surrogate key values from the data itself,* a fancy way of saying “create a random-looking string for every unique combination of values you find in my table”. These hashing functions are **deterministic**, meaning the same set of inputs will always produce the same output.
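+
+To see roughly what this looks like under the hood, here is a minimal hand-rolled sketch of hashing concatenated column values into a key (table and column names are illustrative; in practice you would lean on a tested macro rather than writing this by hand):
+
+```sql
+-- a hand-rolled hashed key; the hash is derived entirely from the row's own values
+select
+    md5(
+        coalesce(cast(order_id as varchar), '') || '-' ||
+        coalesce(cast(product_id as varchar), '')
+    ) as order_line_key,
+    *
+from order_lines
+```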
In our SQL models, we can pass the column or columns that represent the grain to this hashing function and voilà, a consistent, unique identifier is generated automatically! This has been packaged up in the `surrogate_key()` macro in the `dbt_utils` package ([source](https://github.com/dbt-labs/dbt-utils#surrogate_key-source)), and works across warehouse providers! Check out our SQL Magic post that dives deeper into this function [here](https://docs.getdbt.com/blog/sql-surrogate-keys).
+
+```sql
+-- in models/reports/daily_user_orders.sql
+with
+
+orders as (
+    select * from {{ ref('fct_orders') }}
+),
+
+agg as (
+
+    select
+        date_trunc(day, order_date) as report_date,
+        user_id,
+        count(*) as total_orders
+    from orders
+    group by 1,2
+
+),
+
+final as (
+
+    select
+        {{ dbt_utils.surrogate_key([
+            'report_date',
+            'user_id'
+          ])
+        }} as unique_key,
+        *
+    from agg
+
+)
+
+select * from final
+```
+
+Using hashed keys makes our transformations idempotent — every dbt run results in the same exact outputs every time. I can safely delete all my non-source objects in my warehouse, execute a dbt run and be right back where I started (though I wouldn’t necessarily recommend this 😅).
+
+The analytical warehouses we use now no longer have the same constraints that traditional warehouses had — joins on strings aren’t notably less performant than those on integers, and storing slightly larger values in the surrogate key column is peanuts given the relative cost of storage on these platforms. This strategy also removes the need for tight coupling of transformations to propagate the surrogate key values across our project — anywhere the inputs for the surrogate keys are present, the hashing function produces the same keys, so we can take advantage of parallel processing in our warehouse and avoid the bottlenecks we had before.
+
+### Caveats
+
+This strategy is not without its caveats either!
+
+
+- **Collisions -** Although it's *exceedingly* rare, depending on the hashing algorithm you use, it's possible for two different sets of inputs to produce the same outputs, causing erroneous duplicate records in your dataset. Using an MD5 hash (the default for the `dbt_utils.surrogate_key` macro), you have a 50% chance of a collision when you get up to 2^64 records (1.84 x 10^19, aka a whole lot of data). While [very very very unlikely](https://docs.getdbt.com/terms/surrogate-key#a-note-on-hashing-algorithms), it’s certainly something to consider for truly massive datasets.
+- **Datatypes -** If you’re in the process of migrating legacy code to a new warehouse provider, you likely have some constraints on the datatype of your keys from the consumers of your datasets, and may have some issues converting to a string-based key. Luckily, some warehouse providers have hash functions that output integer values (like Snowflake’s `MD5_NUMBER_UPPER64` and `MD5_NUMBER_LOWER64` functions). However, these have fewer bits in the hashing function, so they may lead to collision issues on big data sets.
+- **Performance -** Hashed keys generally result in long string-type values. On massive datasets on some warehouses, this could cause some performance issues. Unlike MIISKs, string values can’t be easily partitioned to improve query performance. Luckily, as described in the above bullet point, you can choose to utilize hashing functions that output other, more performant datatypes!
+- **Storage -** As mentioned above, hash keys will end up with higher storage costs than their MIISK counterparts.
Given that storage in cloud warehouses is extremely cheap, it’s unlikely to be worth the effort to optimize for storage costs.
+
+## OK, so which one?
+
+Surrogate keys are a critical component of a logical data model, and as with most anything, you’ve got options when it comes to generating and maintaining them with dbt. Your business’s unique constraints with respect to maintenance overhead, performance, and data size will likely be the primary drivers for your decision. It will also be important to consider your stakeholders’ needs — are they used to seeing data in a particular format? Are there one hundred dashboards that will explode if you change some keys from an integer to a string? For many orgs, this is a non-trivial decision!
+
+For my money, the simplicity of using hashed keys far outweighs the potential benefits of having MIISKs in your data model. Building with dbt works best when all parts of your project are idempotent, and hashed keys require close to zero maintenance. The cost of time spent rebuilding your surrogate keys in your data models if you can’t recreate them with a simple `dbt run` usually offsets any modest performance and storage gains you might be able to achieve with MIISKs.
+
+Big thanks to [Mike Fuller](https://github.com/mikegfuller) and [Bennie Regenold](https://github.com/bennieregenold7) for help ideating on this blog!
diff --git a/website/blog/2022-08-22-narrative-modeling.md b/website/blog/2022-08-22-narrative-modeling.md
new file mode 100644
index 00000000000..71252279873
--- /dev/null
+++ b/website/blog/2022-08-22-narrative-modeling.md
@@ -0,0 +1,236 @@
+---
+title: "Narrative modeling: How structure can tell a story"
+description: "Analytics engineers work in the space between technical and business understanding, but tried-and-true modeling patterns often default to the technical. In this article, Ian Fahey makes a case for a pattern of modeling that follows the narrative of the business it represents."
+slug: narrative-modeling
+authors: [ian_fahey]
+tags: [analytics craft]
+hide_table_of_contents: false
+
+date: 2022-08-22
+is_featured: true
+---
+
+The larger a data ecosystem gets, the more its users and stakeholders expect consistency. As the ratio of data models to team members (to say nothing of stakeholders to team members) skyrockets, an agreed-upon modeling pattern often acts as scaffolding around that growth.
+
+The biggest tool in the toolbox today, dimensional modeling, offers enough consistency to make it the dominant approach in the space, but what might be possible if we shut that toolbox, took a break from our workbench, and instead strolled over to our bookshelf?
+
+In other words, what if we told a story?
+
+# The stakeholder’s hydra
+
+“When ***business questions*** come, they come not single spies, but in battalions.” - noted data professional William Shakespeare
+
+This is, perhaps, a more collaborative way to describe the analytics engineer / stakeholder relationship than “if you give a mouse a cookie.”
+
+After all, business questions *should* multiply, following the path from “What happened?” to “Why did that happen?” to “How can we predict what happens in the future?” and finally “How can we make something else happen instead?”
+
+What does this look like in practice? Let’s take an example:
+
+*How many orders came in last week?*
+
+With a straightforward question, formal modeling principles may not be important. You have the source data you need and the information requested.
Name it something that works and move on. It’s that simple.
+
+It’s *not* that simple.
+
+*How many of the placed orders were fulfilled within three days?*
+
+*How many items were in each order?*
+
+*How many of the orders were from returning customers?*
+
+Business curiosity is the quintessential hydra (just look at a dbt DAG), and the analytics engineer should know better than to think a single head is the end of it.
+
+With an exponential growth in scope, however, comes a question of consistency. Modeling at scale is often easier with a structural framework. Specifically, principled naming conventions and database design help analytics engineers move quickly through stylistic decisions and focus on the request at hand, while providing other team members the ability to support follow-up questions on models they may not have built in the first place.
+
+# The leading solution
+
+So what conventions should we use in our projects?
+
+History and gravity have pulled many a team towards [dimensional modeling](https://docs.getdbt.com/terms/dimensional-modeling), building business molecules out of atomic fcts and dims. This approach benefits from well-deserved “if it ain’t broke” adoption as well as a commanding market share of data team practices.
+
+Within this framework, our business question would likely be answered from a table called `fct_orders`, because an order is, in a sense, an event. It might have a `customer_id` field that ties it to another model, `dim_customers`.
+
+These are common examples of a fact/dim structure, and given this tried-and-true approach, it may not make sense to seek another one. Any alternative would have to have similarly reliable conventions while offering some additional benefit to be worth the departure from the norm.
+
+But what if that additional benefit was clarity?
+
+# Introducing narrative modeling
+
+In the past, I’ve used a framework I call *narrative modeling*, naming and structuring data models so that they tell the story of the business. Unlike dimensional modeling, which was born out of a need to solve a technical problem (expensive data storage), the principles of narrative modeling are people-oriented:
+
+- Navigating an analytical database should be intuitive and educational
+- Traversing the underlying code for an analytical database should be intuitive and stakeholder-friendly
+- Adding to an analytical database should be intuitive and scalable
+
+Out of those principles came three primary conventions: event tables, details tables, and entity schemas.
+
+## Event tables
+
+First, the backbone of a good narrative is the plot: what happened? In our example, *How many orders came in last week?*
+
+To answer this question, we still need one row per order placed, with timestamps so that we can filter down to specific time periods (e.g. last week). But what should we name it?
+
+Every principle of narrative modeling included the word “intuitive”. With that in mind, let’s call this table `order_placed`.
+
+In narrative modeling, we’d call `order_placed` an *event table*. Event tables have the naming convention of `subject_verb`.
+
+If this looks similar to `fct_` tables, it is! However, the benefit of `order_placed` over `fct_orders` appears when you need an additional order-level event. For instance, *How many orders were fulfilled last week?*
+
+- In dimensional modeling, you already used `fct_orders`, so you likely would need to rename that model.
  This can result in naming conventions like `fct_order_placement` and `fct_order_fulfillment`, which can feel like reporting on two separate entities rather than two things that can happen to orders.
+- In narrative modeling, the sky’s the limit as we stand up `order_fulfilled` alongside `order_placed`, with room for `order_cancelled` and all manner of other steps along an order’s flow.
+
+Now we can model out the narrative thrust of an order, but events can be a little dry without a fleshed-out protagonist. How would we describe our hero, a single order?
+
+## Details tables
+
+In narrative modeling, details tables are where a user can expect to find more descriptive information about a particular business entity.
+
+In the example of orders, an `order_details` table might contain fields like:
+
+- `customer_id`
+- `items_ordered`
+- `payment_method`
+
+Initially, it might seem as though these should just be fields on `order_placed` and, for those examples, it’s a reasonable point[^1]. Consider instead the following:
+
+- `items_fulfilled`
+- `days_to_fulfillment`
+
+One can imagine a stakeholder wanting to know all five of these data points, but the context for them comes from multiple events in the order’s flow. A `_details` table allows us to assemble a broad bank of knowledge about a given entity (in this case, an order) in one place. If data refresh is performant enough, this could even be the place to surface true snapshot fields like `status` or `current_location`.
+
+So we have everything that happened to our order and everything we’d want to know about it. Now let’s put it all together…literally.
+
+## Entity schemas
+
+I mentioned above that navigating an analytical database should be intuitive and educational. Given the building blocks of event and details tables described above, the intuitive part could be considered complete just by naming convention. If all of our data models were in a single schema, then the following tables would likely sort together:
+
+- `order_cancelled`
+- `order_details`
+- `order_fulfilled`
+- `order_placed`
+
+Keeping related information grouped seems sufficient for usability. Then again, I’ve seen enough analytical databases where the database and schema have the same name (e.g. `DATA_MARTS.DATA_MARTS.*`) to see an opportunity for building a database that describes the business as soon as you open it.
+
+Throughout our example, `order` has been an entity we want to know a lot about. Any undertaking that creates business questions will have dozens if not hundreds of entities that people care about. Even in our example, we’ve glanced off other entities, such as `item` and `customer`.
+
+Grouping the models for each entity under a schema named for that entity builds out a database that describes the scope of the business in simple terms, while also allowing stakeholders using the database to navigate more purposefully, expanding schemas rather than scrolling tables in a single mega-schema.
+
+- `analytics` database
+    - `customer` schema
+        - `customer_details`
+        - `customer_created`
+    - `item` schema
+        - `item_details`
+        - `item_fulfilled`
+    - `order` schema
+        - `order_cancelled`
+        - `order_details`
+        - `order_fulfilled`
+        - `order_placed`
+
+This structuring principle can also help address the boogeyman of business entity resolution, overlapping names. If you’re a business that orders parts regularly and then fulfills customer orders, the question *How many orders came in last week?* can get a lot more confusing.
At a fast-moving company, a new hire could be asked the question and answer in the wrong context completely[^2], because the tenured stakeholder can no longer imagine mixing them up.
+
+If instead the database had schemas for `parts_order` and `customer_order`, that same new hire hits the database, sees those schemas, and thinks “Oh, there are two types…I should probably ask which.” That distinction can be a lot harder to spot in a single analytics schema.
+
+Intuitive *and* educational.
+
+# Let’s get visual
+
+One of the most powerful ways for stakeholders to conceptualize the flow of data through a dbt project is through the DAG visualization in dbt docs. If we consider it a goal of ours to capture business knowledge and have conversations about how we ascertain that knowledge, let’s consider how the two options manifest in their DAGs:
+
+### Dimensional modeling
+
+![Untitled](/img/blog/2022-08-22-narrative-modeling/dimensional-modeling-dag.png)
+
+In this approach, we have the standard `fct` and `dim` tables and a clean DAG flow. Let’s consider some possible shortcomings:
+
+- Because I made this flow, I know that the path from `fct_shipments` to `dim_order_items` to `fct_orders` represents a knowledge flow. A package got shipped with items in it, meaning those items themselves are now shipped, and if all of the items in a given order are shipped, then the entire order is fulfilled. However, for a new person to learn that in this approach, they would need to enter the models themselves and look at the SQL for *why* they are dependencies.
+- We called the table `fct_orders` because orders are events. Because we can conceive of a stakeholder wanting to identify *cancelled* orders, we take an `int` / `fct` approach, but now the intent of `fct` feels a little murky. It captures an event, *order placement*, at which time we *don’t know* it will be cancelled. We could potentially create `dim_orders` and `fct_order_placements` if we want to capture both, but that suggests the `fct` / `dim` design is much more of a choice, one different developers in your codebase might approach differently.
+
+In general, the dimensional DAG can begin to feel like it’s not for a user who *only* has a business context, which can keep the structural decision-making purely on the analytics engineer or at best only the most technically-adept stakeholders.
+
+### Narrative modeling
+
+![Untitled](/img/blog/2022-08-22-narrative-modeling/narrative-modeling-dag.png)
+
+How does this compare to the above?
+
+- Now we’re being explicit about our dependencies. We’re telling the viewer we infer items were shipped from packages being shipped, then use those items to identify when an order was entirely fulfilled.
+- The order placement and fulfillment steps are both explicit, flowing into an `order_details` table where we can also calculate `days_to_fulfillment`.
+- The DAG does look a little more complex, with additional nodes and a broader base of final models on the right compared to the slimming effect of the dimensional modeling. This potential drawback could use some contextualization:
+    - First, we are being more explicit about the business questions we are answering, so each of the milestones of an `order` that might have previously been tucked into the CTEs of `fct_orders` is now a node unto itself.
+    - Second, remember the hydra? Ideally, business questions beget business answers beget new business questions. If the knowledge base needs to expand, it’s reasonable that the DAG might as well.
      The key, however, is validating that each model is answering a question someone is asking.
+
+To that final point, if presented with the DAG from the narrative modeling approach, stakeholders can participate in the conversation. One can imagine a stakeholder looking at the flow and saying “Interesting that we say an order is fulfilled when each item *ships.* Perhaps we should get shipper data and declare an order fulfilled when all items are *received*.” Because we took the modeling structure as far as possible into business concepts, we can have a conversation on methodology without shouting over a great contextual distance.
+
+# Benefits in practice
+
+### Stakeholders find it easier to participate in data modeling
+
+- They don’t need to learn the idiosyncrasies of fct/dim structuring (e.g. the idea that only some business datapoints are called facts).
+- The discrete models are framed as events or entities in the business, which means the stakeholder can compare the model’s logic to their subject matter expertise.
+
+### Intangible business steps are easier to model
+
+- Knowledge gaps are captured accurately. For instance, if the best way you know that a shipment was received by a customer is that a truck driver scanned it out of the system, you can model `shipment_scanned_out` as an explicit model, followed by an implicit `shipment_received` model referencing it. This stores in code the company’s current point-of-view that the scanning action is the best information available.
+- Certain business decisions directly drive data transformations. If an entire package costs $50.00 to ship and it has multiple items inside, shipping cost could be attributed to each item via weight or product value. In either case, teams can capture this attribution as `item_apportioned_shipping_cost`.
+
+### Users can tie business concepts to source data
+
+- While the schema structure above is focused on business entities, there are still ample use cases for [staging and intermediate tables](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview).
+- After cleaning up source data with staging tables, use the same “what happened” approach to more technical events, creating a three-node dependency from `stg_snowplow_events` to `int_page_click_captured` to `user_refreshed_cart` and thus answering the question “where do we get online user behavior information?” in a quick visit to the DAG in dbt docs.
+
+# Should your team use it?
+
+Narrative modeling primarily values comprehension as an outcome of data modeling, which can be a high priority for…
+
+- …companies with a high stakeholder-to-data-team-member ratio.
+- …companies with SQL-savvy stakeholders.
+- …companies looking to onboard new team members at a high rate (as the project in this case is a sketch of the business itself).
+- …companies who can dedicate staffing and time to writing strong documentation so that the doors to the models can be thrown open.
+
+Narrative modeling might not be the right fit for…
+
+- …companies where storage spend, even in cloud warehouses, needs to be closely tracked. After all, fct/dim models emerged from a need to optimize data storage.
+- …companies with BI tools that rely heavily on multi-entity tables. This can generally still work with narrative modeling, however, if there is an additional layer of standard datasets being modeled out of the shared components of your narrative models (to ensure data stays in step across contexts).
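+
+Whichever way your team leans, it can help to see the pattern in code. Here is a minimal dbt sketch of the `order_details` model described earlier; the event models come from the examples above, but the timestamp columns and the Snowflake-style `datediff` function are assumptions, not a prescribed implementation:
+
+```sql
+-- models/order/order_details.sql (illustrative sketch; column names are assumed)
+with order_placed as (
+    select * from {{ ref('order_placed') }}
+),
+
+order_fulfilled as (
+    select * from {{ ref('order_fulfilled') }}
+)
+
+select
+    order_placed.order_id,
+    order_placed.customer_id,
+    order_placed.items_ordered,
+    order_placed.payment_method,
+    order_fulfilled.items_fulfilled,
+    -- context assembled from multiple events in the order's flow
+    datediff('day', order_placed.placed_at, order_fulfilled.fulfilled_at) as days_to_fulfillment
+
+from order_placed
+left join order_fulfilled
+    on order_placed.order_id = order_fulfilled.order_id
+```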
+ +# A neverending story + +There’s a classic movie called Desk Set in which Katharine Hepburn runs the reference desk of a major TV network and Spencer Tracy comes in to install a research computer in her department. In gladiatorial bouts of banter, the movie investigates the concept of knowledge and, in particular, how human it should be. At one point, Tracy’s Richard Sumner runs Hepburn’s Bunny Watson through a chunky “train left the station” word problem and I think about her response (abbreviated below) all the time. + +**Richard Sumner:** Now, a train started out at Grand Central, with seventeen passengers aboard and a crew of nine. At 125th Street, four got off and nine got on. At White Plains, three got off and one got on. At Chappaqua, nine got off and four got on. And at each successive stop thereafter, nobody got off, nobody got on till the train reached its next-to-the-last stop, where five people got off and one got on. Then it reached the terminal. + +**Richard Sumner:** How many people got off at Chappaqua? + +**Bunny Watson:** Nine. + +**Richard Sumner:** Uh, would you mind telling me how you arrived at that conclusion? + +**Bunny Watson:** Spooky, isn't it? Did you notice that there are also nine letters in Chappaqua? + +**Richard Sumner:** Are you in the habit of associating words with the number of letters in them? + +**Bunny Watson:** I associate many things with many things. + +**Richard Sumner:** I see. Hmm. + +**Bunny Watson:** Aren't you going to ask me how many people got off at White Plains? Three. + +**Richard Sumner:** But there are ten letters in White Plains. + +**Bunny Watson:** No. Eleven. + +**Richard Sumner:** [beat] But only three got off there. + +**Bunny Watson:** You see, I've only ever been to White Plains three times in my whole life. + +Much like Mr. Sumner, we would be hard-pressed to teach a computer to answer questions the way Bunny does. Her accumulated knowledge and how she accesses it is deeply human. Why, then, do we so often take the accumulated knowledge of subject matter experts across our business and abstract it AWAY from its context to capture it in a data model? Put another way, why do we store business answers in a way that, over time, forgets the questions? + +Narrative modeling can help structure the data with questions and answers stored side-by-side, allowing us to be reference librarians with a killer card catalog. We can model out the expertise of the finance team or the marketing team or the product team in their own words, which keeps the context for not only the next person to join *those* teams, but also the next person to join our own. + +And so, in the face of battalions of business questions, we can become a host unto ourselves. + +### Footnotes +[^1]: However, in terms of the upstream commerce data, it’s conceivable for items to be added to an order as discrete events first before a final order placement event. An API endpoint for an order being placed might not need to know what’s in the cart, but rather just the who and the when. At that point, it’s dealer’s choice whether to join in items for order_placed or order_details + +[^2]: Ask me how I know! 
diff --git a/website/blog/2022-08-22-unit-testing-dbt-package.md b/website/blog/2022-08-22-unit-testing-dbt-package.md new file mode 100644 index 00000000000..80c0ad639a7 --- /dev/null +++ b/website/blog/2022-08-22-unit-testing-dbt-package.md @@ -0,0 +1,219 @@ +--- +title: "An introduction to unit testing your dbt Packages" +description: "Traditionally, integration tests have been the primary strategy for testing dbt Packages. In this post, Yu Ishikawa walks us through adding in unit testing as well." +slug: unit-testing-dbt-packages +authors: [yu_ishikawa] +tags: [dbt tutorials] +hide_table_of_contents: false + +date: 2022-08-25 +is_featured: true +--- + +_Editors note - this post assumes working knowledge of dbt Package development. For an introduction to dbt Packages check out [So You Want to Build a dbt Package](https://docs.getdbt.com/blog/so-you-want-to-build-a-package)._ + +It’s important to be able to test any dbt Project, but it’s even more important to make sure you have robust testing if you are developing a [dbt Package](https://docs.getdbt.com/docs/building-a-dbt-project/package-management). + +I love dbt Packages, because it makes it easy to extend dbt’s functionality and create reusable analytics resources. Even better, we can find and share dbt Packages which others developed, finding great packages in [dbt hub](https://hub.getdbt.com/). However, it is a bit difficult to develop complicated dbt macros, because dbt on top of [Jinja2](https://palletsprojects.com/p/jinja/) is lacking some of the functionality you’d expect for software development - like unit testing. + +In this article, I would like to share options for unit testing your dbt Package - first through discussing the commonly used pattern of integration testing and then by showing how we can implement unit tests as part of our testing arsenal. + + +## Unit Testing vs. Integration Testing + +Unit testing and integration testing are two common paradigms in create well-tested code. For a great deep dive into the difference between the two check out [this article](https://circleci.com/blog/unit-testing-vs-integration-testing/) from the CircleCI team. At a high level: + +- **Integration tests** are tests which operate against the entire integrated project or application. +- **Unit tests** are tests which verify a single element within a software project, such as an individual function or macro. + +Many dbt Packages use integration tests as their primary testing methodology. For example [dbt-utils](https://github.com/dbt-labs/dbt-utils) has [the integration_tests directory](https://github.com/dbt-labs/dbt-utils/tree/main/integration_tests) so that we can run integration tests by using the generic tests and macros contained within the package. The integration tests directory is essentially a standard dbt project within the dbt-utils package that is tested much the same way any dbt project would be. + +To use the integration tests - you’d simply run `dbt test` within the `integration_tests` directory. The tests execute as normal - meaning you can use your favorite methods of running CI against your dbt project to ensure that your integration tests are passing. + +Integration tests can help give you peace of mind that your package is performing as expected - but they have some drawbacks. Macros and generic tests frequently call other macros and the deeper dependency calls get, the more difficult it becomes to debug your macros using only integration tests. 
+
+In this scenario it can be helpful to go beyond integration tests and implement unit tests for your macros. These unit tests can be run with a [dbt run operation](https://docs.getdbt.com/reference/commands/run-operation). Let’s take a look at a quick example of how this can be done.
+
+Consider a dbt Package called `dbt_sample_package`. We would like to implement a simple macro to create a string literal from input text, in a macro named `to_literal` in the file `macros/to_literal.sql`.
+
+```sql
+-- macros/to_literal.sql
+{% macro to_literal(text) %}
+
+  '{{- text -}}'
+
+{% endmacro %}
+```
+
+---
+
+To implement a unit testing macro corresponding to the `to_literal` macro, we can create a macro to test our original macro in `integration_tests/macros/test_to_literal.sql`.
+
+Then we call the `to_literal` macro in the testing macro, and if the result isn’t the same as expected, we raise an error using the [exceptions.raise_compiler_error macro](https://docs.getdbt.com/reference/dbt-jinja-functions/exceptions).
+
+```sql
+-- integration_tests/macros/test_to_literal.sql
+{% macro test_to_literal() %}
+
+  {% set result = dbt_sample_package.to_literal('test string') %}
+
+  {% if result != "'test string'" %}
+
+    {{ exceptions.raise_compiler_error('The test failed') }}
+
+  {% endif %}
+
+{% endmacro %}
+```
+
+---
+
+By doing that, we can call the testing macro in the dbt project of integration tests using `dbt run-operation`.
+
+```shell
+dbt run-operation test_to_literal
+```
+
+---
+
+If we want to run all tests with a single command, it would be good to bundle them in a macro. Then we can call that macro with `dbt run-operation`.
+
+```sql
+-- integration_tests/macros/run_unit_tests.sql
+{% macro run_unit_tests() %}
+
+  {% do test_to_literal() %}
+
+  {% do another_test() %}
+
+{% endmacro %}
+```
+
+---
+
+## Unit tests for multiple adapters
+
+Your dbt Package may support multiple adapters. If you are a postgres user, you understand that the preceding `to_literal` macro doesn’t work on postgres, because the expression for string literals is different. So, we have to implement a macro to handle the special case of postgres. Now, we implement the subsequent macro called `postgres__to_literal` in `macros/to_literal.sql`, in addition to the implementation above.
+
+```sql
+-- macros/to_literal.sql
+{% macro to_literal(text) %}
+
+  {{ return(adapter.dispatch('to_literal', 'dbt_sample_package')(text)) }}
+
+{% endmacro %}
+
+{% macro default__to_literal(text) %}
+
+  '{{- text -}}'
+
+{% endmacro %}
+
+{% macro postgres__to_literal(text) %}
+
+  E'{{- text -}}'
+
+{% endmacro %}
+```
+
+---
+
+You may wonder how we can implement unit testing macros for multiple adapters efficiently. We can use the [adapter.dispatch macro](https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch) even in unit testing macros. As we separate the behavior for postgres, we can implement an independent unit testing macro for postgres as well.
+
+```sql
+-- integration_tests/macros/test_to_literal.sql
+{% macro test_to_literal() %}
+
+  {{ return(adapter.dispatch('test_to_literal', 'integration_tests')()) }}
+
+{% endmacro %}
+
+{% macro default__test_to_literal() %}
+
+  {% set result = dbt_sample_package.to_literal('test string') %}
+
+  {% if result != "'test string'" %}
+
+    {{ exceptions.raise_compiler_error('The test failed') }}
+
+  {% endif %}
+
+{% endmacro %}
+
+{% macro postgres__test_to_literal() %}
+
+  {% set result = dbt_sample_package.to_literal('test string') %}
+
+  {% if result != "E'test string'" %}
+
+    {{ exceptions.raise_compiler_error('The test failed') }}
+
+  {% endif %}
+
+{% endmacro %}
+```
+
+---
+
+We can then select unit tests based on the specified adapter. Let’s assume we have different dbt profiles corresponding to BigQuery and postgres. By specifying a dbt profile based on the adapter, we can select what testing macros are called internally.
+
+```shell
+# Run unit tests on BigQuery
+dbt run-operation run_unit_tests --profile bigquery
+# `default__test_to_literal` is internally called.
+
+# Run unit tests on postgres
+dbt run-operation run_unit_tests --profile postgres
+# `postgres__test_to_literal` is internally called.
+```
+
+---
+
+## Introducing dbt-unittest
+
+It’s historically been a challenge to do unit testing in your dbt Packages, as Jinja2 doesn’t offer a built-in unit testing feature. But, we have good news: dbt provides the `exceptions.raise_compiler_error` macro so that we can raise errors within a `dbt run-operation`. Using this, I implemented a dbt Package called [yu-iskw/dbt-unittest](https://hub.getdbt.com/yu-iskw/dbt_unittest/latest/), which is inspired by [python’s unittest module](https://docs.python.org/3/library/unittest.html), to enhance unit testing of dbt Package development.
+
+[GitHub - yu-iskw/dbt-unittest: A dbt Package provides macros for unit testing](https://github.com/yu-iskw/dbt-unittest)
+
+Using this, we can re-implement the example using the `dbt_unittest.assert_equals` macro, and the implementation gets much simpler.
+
+```sql
+-- integration_tests/macros/test_to_literal.sql
+{% macro test_to_literal() %}
+
+  {{ return(adapter.dispatch('test_to_literal', 'integration_tests')()) }}
+
+{% endmacro %}
+
+{% macro default__test_to_literal() %}
+
+  {% set result = dbt_sample_package.to_literal('test string') %}
+
+  {{ dbt_unittest.assert_equals(result, "'test string'") }}
+
+{% endmacro %}
+
+{% macro postgres__test_to_literal() %}
+
+  {% set result = dbt_sample_package.to_literal('test string') %}
+
+  {{ dbt_unittest.assert_equals(result, "E'test string'") }}
+
+{% endmacro %}
+```
+
+---
+
+I practiced this idea even in the development of `yu-iskw/dbt-unittest`. The actual testing macros are located [here](https://github.com/yu-iskw/dbt-unittest/tree/main/integration_tests/macros/tests). Moreover, we are able to implement a continuous integration workflow, as in regular software development. For instance, I implemented [a workflow with GitHub Actions](https://github.com/yu-iskw/dbt-unittest/blob/main/.github/workflows/unit-tests.yml). It enables me to notice when something is wrong with my changes.
+
+Aside from that, it would be great to take a look at other dbt Packages for integration testing and unit testing on dbt hub. For instance, [the dbt_datamocktool package](https://hub.getdbt.com/mjirv/dbt_datamocktool/latest/) is another useful package for unit testing dbt projects.
We can create mock CSV seeds to stand in for the sources and refs that your models use, and test that the model produces the desired output. That is useful for mocking test data for your dbt project.
+
+## Summary
+
+In this article we’ve:
+
+- Introduced two approaches for testing your dbt Packages
+- Demonstrated a simple unit testing example
+- Shown how you can use existing tools to help build out your unit testing capabilities
+
+Hopefully this is helpful to you in your dbt Package development journey.
diff --git a/website/blog/2022-08-31-august-product-update.md b/website/blog/2022-08-31-august-product-update.md
new file mode 100644
index 00000000000..df326039f25
--- /dev/null
+++ b/website/blog/2022-08-31-august-product-update.md
@@ -0,0 +1,64 @@
+---
+title: "August 2022 dbt Update: v1.3 beta, Tech Partner Program, and Coalesce!"
+description: "Coalesce is less than 2 months away!"
+slug: dbt-product-update-2022-august
+authors: [lauren_craigie]
+
+tags: [dbt updates]
+hide_table_of_contents: false
+
+date: 2022-08-31
+is_featured: false
+---
+
+Semantic layer, Python model support, the new dbt Cloud UI and IDE… there’s a lot our product team is excited to share with you at [Coalesce](https://coalesce.getdbt.com/register?utm_medium=email&utm_source=hs_email&utm_campaign=q3-2023_coalesce-2022_awareness&utm_content=connect_product-update_) in a few weeks.
+
+But *how* these things fit together—because of where dbt Labs is headed—is what I’m most excited to discuss.
+
+You’ll hear more in [Tristan’s keynote](https://coalesce.getdbt.com/agenda/keynote-the-end-of-the-road-for-the-modern-data-stack-you-know), but this feels like a good time to remind you that Coalesce isn’t just for answering tough questions… it’s for surfacing them. For sharing challenges we’ve felt in silos, finding the people you want to solve them with, and spending the rest of the year chipping away at them. As Tristan says in his latest blog, [that’s how this industry moves forward](https://www.getdbt.com/blog/finding-our-next-big-problem/).
+
+[REGISTER NOW](https://coalesce.getdbt.com/register?utm_medium=email&utm_source=hs_email&utm_campaign=q3-2023_coalesce-2022_awareness&utm_content=connect_product-update_)
+
+
+
+## **What's new**
+
+- **dbt Core v1.3 beta:** Do you use Python for analytics? The first beta prerelease of dbt Core v1.3—including support for dbt models written in Python—is [ready to explore](https://docs.getdbt.com/guides/migration/versions/upgrading-to-v1.3)! Check it out, and read more about dbt-supported Python models [in our docs](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/python-models).
+- **Technology Partner Program:** We just launched our new [Technology Partner Program](https://www.getdbt.com/blog/dbt-labs-technology-partner-program/) with 40+ friends in the Modern Data Stack to provide consistent support for seamless integrations joint users can trust. Check our new [dbt Cloud integrations page](http://www.getdbt.com/product/integrations) for what’s available today!
+- **Single-tenant users:** dbt Cloud v1.1.60 is now available on dbt Cloud Enterprise.
+
+## What’s better
+
+- **dbt Cloud UI:** The [new dbt Cloud UI](https://www.getdbt.com/blog/the-dbt-cloud-ui-is-getting-a-makeover/) is in beta, and can be opted into by any multi-tenant dbt Cloud customer. Cleaned-up interface, better ergonomics, fewer clicks to frequently-used screens.
+- **dbt Cloud IDE:** Did you catch [Staging last month](https://www.getdbt.com/blog/staging-highlights-the-latest-from-dbt-labs/) (our quarterly product update)? The dbt Cloud IDE has been overhauled for greater speed and performance, and is now in beta—[enroll to check it out!](https://bit.ly/dbt-cloud-ide-beta) + +## New resources + +**Things to try** 🛠️ + +- **dbt_artifacts v1.2.0:** [Brooklyn Data Co just shipped a pretty significant re-write to the dbt_artifacts package](https://brooklyndata.co/blog/dbt-artifacts-v100). Capture all the metadata generated by dbt at the end of an invocation (project nodes, success rate, test results, etc), and store directly in Snowflake, Databricks, or BigQuery for immediate analysis. +- **dbt YAML validator using JSON schema**: If you do any development in VS Code, [this repo unlocks autocomplete and validation for dbt’s YAML files](https://github.com/dbt-labs/dbt-jsonschema/). Find those tests that never ran because you messed up the indentation. *Not that that would ever happen to you.* +- **dbt Exposures for Hightouch**: [Exposures](https://docs.getdbt.com/docs/build/exposures) in dbt allow you to quickly see how downstream data applications are making use of your dbt models and sources. These don’t have to just represent dashboards in BI tools though — [you can now represent your Hightouch syncs as dbt exposures too](https://hightouch.com/blog/introducing-dbt-exposures-for-hightouch/). +- **Are you a certified dbt developer?** We recently launched our new [Analytics Engineering certification](https://www.getdbt.com/certifications/analytics-engineer-certification-exam/) program, and would love to hear what you think. We personally dug [this writeup from Charles Verleyen](https://medium.com/astrafy/dbt-exam-feedback-8d07a0593648) on what to expect, and exactly how much experience/prep he recommends. + +**Things to read 📚** + +- **How to enforce rules at scale:** It’s best practice to add model tests in dbt, but can you require it? [In his latest blog](https://docs.getdbt.com/blog/enforcing-rules-pre-commit-dbt), Benoit Perigaud (dbt Labs Senior Analytics Engineer) shares how to use the pre-commit-dbt package to do just that. +- **How we shaved 90 minutes off a model:** [Check out how we used the model timing tab](https://docs.getdbt.com/blog/how-we-shaved-90-minutes-off-model) in dbt Cloud to find and re-architect our longest running model. +- **How to decide between hashed or integer surrogate keys:** Dave Connors (dbt Labs Senior Analytics Engineer) [breaks down the pros and cons of each approach](https://docs.getdbt.com/blog/managing-surrogate-keys) in dbt. +- **How to think about dbt Python models in Snowpark:** [Eda Johnson](https://www.linkedin.com/in/eda-johnson-saa-csa-pmp-0a2783/) wrote a nice primer on [how to approach dbt-supported Python models in Snowflake](https://medium.com/snowflake/a-first-look-at-the-dbt-python-models-with-snowpark-54d9419c1c72) with Snowpark Python. +- **dbt Labs is officially partnering with Monte Carlo**: The partnership makes it simple for analytics engineers to [supplement dbt testing with end-to-end observability](https://www.getdbt.com/blog/monte-carlo-dbt-labs-partnering-for-more-reliable-data/). +- **How Comcast accidentally invented a feature store in 2013:** What a genuinely delightful read. 
Josh Berry details the [peaks and pits of a fast-moving data science team](https://towardsdatascience.com/features-are-not-just-for-data-scientists-6319406ac071) that transcended an initial aversion to documentation to build “Rosetta.”
+
+**Consulting corner** 🌎
+
+I just discovered the treasure trove of excellent resources from dbt Labs consulting partners, and want to start sharing more here. Here are a few you might have missed over the summer:
+
+- **Reduce ETL costs:** I’ve only just seen [this blog](https://www.mighty.digital/blog/how-dbt-helped-us-reduce-our-etl-costs-significantly) from Mighty Digital, but found it to be a super practical (and concise) introductory guide to rethinking your ETL pipeline with dbt.
+- **Explore data:** [Part two of a series on exploring data](https://vivanti.com/2022/07/28/exploring-data-with-dbt-part-2-extracting/) brought to you by Vivanti. This post focuses on working with objects in dbt, but I also recommend the preceding post if you want to see how they spun up their stack.
+- **Track historical changes:** Snapshots are a pretty handy feature for tracking changes in dbt, but they’re often overlooked during initial onboarding. [Montreal Analytics explains how to set them up](https://blog.montrealanalytics.com/using-dbt-snapshots-with-dev-prod-environments-e5ed63b2c343) in dev/prod environments.
+- **Learn dbt:** Have some new faces on the data team that might need an introduction to dbt? Our friends at GoDataDriven are hosting a [virtual dbt Learn Sept 12-14](https://www.tickettailor.com/events/dbtlabs/752537).
+
+Thank you!
+
+*This month’s newsletter was brought to you by: Joel, Gloria, Azzam, Amos, and me (Lauren)*
\ No newline at end of file
diff --git a/website/blog/2022-09-07-leverage-accounting-principles-when-finacial-modeling.md b/website/blog/2022-09-07-leverage-accounting-principles-when-finacial-modeling.md
new file mode 100644
index 00000000000..bf1acdcaa24
--- /dev/null
+++ b/website/blog/2022-09-07-leverage-accounting-principles-when-finacial-modeling.md
@@ -0,0 +1,168 @@
+---
+title: "Leverage Accounting Principles when Modeling Financial Data"
+description: "Modeling financial data is rarely ever easy (or fun). Thankfully, there are accounting principles that can be leveraged to ensure your financial models are complete and accurate."
+slug: financial-modeling-accounting-principles
+authors: [joe_markiewicz]
+tags: [analytics craft]
+hide_table_of_contents: false
+
+date: 2022-09-07
+is_featured: true
+---
+
+Analyzing financial data is rarely ever “fun.” In particular, generating and analyzing financial statement data can be extremely difficult and leaves little room for error. If you've ever had the misfortune of having to generate financial reports for multiple systems, then you will understand how incredibly frustrating it is to reinvent the wheel each time.
+
+This process can include a number of variations, but usually involves spending hours, days, or weeks working with Finance to:
+- Understand what needs to go into the reports
+- Model said reports
+- Validate said reports
+- Make adjustments within your model
+- Question your existence
+- Validate said reports again
+
+You can imagine how extremely time-consuming this process can be. Thankfully, you can leverage core accounting principles and other tools to more easily and effectively generate actionable financial reports.
This way, you can spend more time diving into deeper financial analyses.
+
+
+
+I will detail how you are able to leverage these principles, but before I dive in I would be remiss not to mention the dbt packages that are available for popular financial data sources that you can leverage! If you are using Fivetran and currently utilize either Netsuite, QuickBooks, Xero, or Sage Intacct, then you are able to skip the line and use the pre-built reports right out of the box. See below for links to the relevant packages:
+- [Netsuite dbt package](https://github.com/fivetran/dbt_netsuite)
+- [QuickBooks dbt package](https://github.com/fivetran/dbt_quickbooks)
+- [Xero dbt package](https://github.com/fivetran/dbt_xero)
+- [Sage Intacct dbt package](https://github.com/fivetran/dbt_sage_intacct)
+
+These packages generate the three basic financial statement reports (plus a few bonus models) your Finance team will need:
+- General Ledger/Transaction Detail: a ledger of all posted transactions within an organization.
+- Balance Sheet: a summary of the financial balances within an organization. The quintessential accounting calculation (Assets = Liabilities + Equity).
+- Profit and Loss/Income Statement: a report detailing the revenues and expenses within an organization.
+
+By simply installing the package you can get those reports in your warehouse in a matter of minutes, allowing you to bypass the cycle outlined above. However, if you do not utilize one of these sources with Fivetran, there is nothing to fear! I will detail the modeling principles used in each of these packages.
+
+Below are the modeling principles I leverage whenever I begin a new financial data modeling adventure. These steps are in order and should be followed as such.
+
+## Step 1: Understanding the Source Schema
+Each normalized financial data source I have worked with is structured in one of two ways: a single table that contains all transactions for the company **OR** a header and line item detail table for each transaction type. Both schema designs have their pros and cons.
+
+The single transaction table makes it easier to get started with your financial modeling and generating your end reports. However, it doesn’t give you a good view of what type of transactions are included within the table. In order to gain an understanding of the transaction types, you will need to spend some time querying the table to identify what transactions are (and are not) included. See the [Fivetran Netsuite ERD](https://fivetran.com/docs/applications/netsuite-suiteanalytics#schemainformation) as an example of the single transaction table schema.
+
+Conversely, the header and line item detail table schema version is great for understanding what types of transactions have taken place across the company. However, this schema structure makes downstream analyses more difficult. For example, you are required to join the header and line item tables together before unioning them with the other transaction objects in order to holistically view all transactions together. See the [Fivetran QuickBooks ERD](https://fivetran.com/docs/applications/quickbooks#schemainformation) as an example of the header and line item table schema.
+
+Regardless of which scenario you find your financial data in, it is best to take some time to gain an understanding of the schema design. The next step is going to involve using the raw table(s) to begin the financial modeling journey.
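+
+For the single-table design, a quick profiling query can go a long way toward understanding what you are working with. A minimal sketch, assuming a `transactions` table with `transaction_type` and `transaction_date` columns (names will vary by source, so check the ERD):
+
+```sql
+-- profile which transaction types exist in a single-transaction-table source
+-- (table and column names are illustrative)
+select
+    transaction_type,
+    count(*) as transaction_count,
+    min(transaction_date) as first_transaction,
+    max(transaction_date) as last_transaction
+from transactions
+group by 1
+order by 2 desc
+```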
+ +![](/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/hobbit-adventure.gif) + +## Step 2: Generating the General Ledger/Transaction Detail Report +The General Ledger/Transaction Detail report rests at the core of every company's financial story. This report will contain every verifiable transaction (cash coming in, cash going out, etc.) that has taken place within the company, but with a fun accounting twist! + +Okay, it may not be fun, or a twist, but this report will include an important piece of the accounting puzzle for each transaction. Yes, I am going to be talking about debits and credits (aka double entry accounting). + +Each transaction will detail how the entry impacts the bottom line as either a debit or a credit to a relevant account. This is invaluable to us for downstream modeling of the Balance Sheet in particular. The debit and credit method of accounting is critical to the matching principle where each item of revenue should match to an item of expense. Think of it like there should always be an offsetting action for each transaction. If you purchase coffee beans to sell to customers, you will debit the coffee beans and credit cash. This has an impact by increasing your asset (coffee beans) and also decreasing your asset (cash). Perfectly balanced, as all things should be. + +I highly recommend finding a cozy spot on your couch and reading up on [double entry accounting](https://www.fool.com/the-ascent/small-business/accounting/articles/double-entry-accounting/) before modeling your General Ledger/Transaction Detail model. Luckily, I have found the majority of financial data sources that leverage the single transaction detail table also handle the double entry accounting for you within the table (see Sage Intacct as an example). However, if your source resembles the header and line item schema design, then you will likely need to ensure you are accounting for the double entries within your models before rolling up all your transactions into a single table. + +I highly recommend taking a look at how this was modeled for each transaction type within the dbt_quickbooks [double entry transaction models](https://github.com/fivetran/dbt_quickbooks/tree/main/models/double_entry_transactions). Below is a great snippet from the double entry folder which shows how the double entry method is accounted for within the model by using a union (notice the account_id changing to reflect the impact of the transaction on the different accounts). +```sql +select + transaction_id, + transaction_date, + customer_id, + vendor_id, + amount, + payed_to_account_id as account_id, + 'debit' as transaction_type, + 'bill' as transaction_source +from bill_join + +union all + +select + transaction_id, + transaction_date, + customer_id, + vendor_id, + amount, + payable_account_id as account_id, + 'credit' as transaction_type, + 'bill' as transaction_source +from bill_join +``` + +Once you ensure you’re properly accounting for the double entry method in the individual transaction type models, you can union all the transaction type models together to create a single table with all transaction types. + +At this point, regardless of which schema design you began with, you should be at the same place with a single table that contains all your transactions (and their offsetting entry) across the company! Now you can start joining in your dimensional fields such as vendor name, account name, account type, and account classification to name a few. 
+Okay, now that I mention account classification: it is a crucial component for building your end financial models, so I should talk more about it.
+
+![](/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/top-gun-classified.gif)
+
+## Step 3: Verify the Account Classifications
+All financial data sources will have some variation of the company chart of accounts, typically in the form of an accounts table. This table can be joined to the General Ledger to enrich your transactions with the corresponding account information. This is an extremely important piece of the equation, as you want to make sure the transactions are impacting the right accounts and are being classified correctly. The account classification allows you to identify whether the account is an Asset, Liability, Equity, Expense, or Revenue account. In turn, this serves as the key component in generating the Balance Sheet and Income Statement reports.
+
+Sometimes you will get lucky and the account classification will be provided within the accounts table (like in [dbt_xero](https://github.com/fivetran/dbt_xero_source/blob/0d1d2c02dbb8e1f8371e703651a704127181c88f/models/stg_xero__account.sql#L30)). If it is not, then you will need to add some logic to your accounts model to accurately set the classification. Typically, the logic involves referencing the account type to determine the classification (for example, an Accounts Payable account type should map to a Liability classification). Thankfully, this has been applied to a number of open source projects and can be leveraged by your team! I recommend taking a look at how the [dbt_quickbooks](https://github.com/fivetran/dbt_quickbooks/blob/43282a8cf77670f6e2ac657167dd19c1014ba111/models/intermediate/int_quickbooks__account_classifications.sql#L23-L35) package maps classifications. Likewise, the [dbt_sage_intacct](https://github.com/fivetran/dbt_sage_intacct/blob/4feec47da41fcc28325913dbae5597132ddccd66/models/intermediate/int_sage_intacct__account_classifications.sql#L12-L22) implementation follows the same logic, but allows for more flexibility in the form of variables that can be modified and edited if the Chart of Accounts on the finance side changes.
+
+Once you verify, or create, the account classification field, you are safe to join the account (and other dimensional) tables with your General Ledger table. Now that everything is joined together, you will start to see the tapestry of your financial history unfolding before your eyes. Beautiful, isn’t it? Time really flies when you are having fun. Actually, now that I am on the topic of time, I can’t wait to talk about the next step: aggregating your General Ledger data by date period!
+
+![](/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/office-its-a-date.gif)
+
+## Step 4: General Ledger by Period and the Date Spine
+As of right now, your Finance team is probably ecstatic that they have a fully functional (and, most importantly, accurate) General Ledger available. While this is great, it can be quite difficult to generate the final financial statements with the data in this format. It will be much easier to wrangle the data if it is aggregated into date periods by account. While you can aggregate by whichever period you like, I suggest aggregating by month, as it allows you and your Finance team to slice and dice by month, quarter, or year to your liking.
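+
+To aggregate by month, you first need one row for every month in your company’s history. This is where a date spine comes in. Below is a minimal sketch using the `dbt_utils.date_spine` macro; the hard-coded bounds are placeholders, and in practice you would derive them from your General Ledger:
+
+```sql
+-- generate one row per month; the literal start and end dates are placeholders
+{{ dbt_utils.date_spine(
+    datepart="month",
+    start_date="cast('2015-01-01' as date)",
+    end_date="cast('2023-01-01' as date)"
+) }}
+```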
+This date spine is going to be our best friend for the rest of the process.
+
+Using a combination of jinja and the date spine macros, you will be able to create a table that contains each month from the beginning of the company’s financial history. While it may be daunting, I recommend taking a look at how the date spine is generated within the dbt_quickbooks [general_ledger_date_spine](https://github.com/fivetran/dbt_quickbooks/blob/main/models/intermediate/int_quickbooks__general_ledger_date_spine.sql) model. You can see how the date spine references the General Ledger model and finds the minimum and maximum date to generate the complete spine. This ensures you are not generating any more or less data than is needed for your downstream analyses.
+
+Once the date spine is created, you should generate a new model to aggregate your General Ledger data by month. In addition to simply aggregating your General Ledger data by month, you will want to make sure you consider each account’s beginning, ending, and net change balances month over month. Calculating these extra fields makes for seamless Balance Sheet and Income Statement reporting down the line.
+
+Before generating the beginning, ending, and net change balances for each month, it is important to know that not all classifications will have the same behavior for these fields. Asset, Liability, and Equity accounts will always have a beginning, ending, and net change balance, as these accounts have rolling totals that change over time. Conversely, Revenue and Expense accounts only have a net change balance. Calculating these fields can be a multi-model process. The process should look something like the following:
+
+- Aggregate your General Ledger data by month
+- Create a cumulative balance for balance sheet accounts
+- Generate a beginning and ending balance for balance sheet accounts using the cumulative balance
+- Join the General Ledger data with your date spine
+- Fill in the empty month records with relevant data showing 0 in net change
+
+I recommend referencing the dbt_quickbooks [general_ledger_balances](https://github.com/fivetran/dbt_quickbooks/blob/main/models/intermediate/int_quickbooks__general_ledger_balances.sql) model as an example of how to best follow the above steps. Once these steps are completed, you should have a model that includes an entry for every month and every account with the beginning, ending, and net change balances.
+
+There has been a lot to retain up to this point, but you are almost ready to finish the process and generate the final financial statements without any additional adjustments. Before you get there, you need to capture the Retained Earnings/Adjusted Income entries!
+
+![](/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/crown-adjustment.gif)
+
+## Step 5: Retained Earnings / Adjusted Income
+
+A piece that is commonly forgotten is the need to account for retained earnings/adjusted income within your balance sheet calculation. In a nutshell, this calculation is your net income/loss for a given period. Since you decided to take the approach of generating the report by month, you will simply need to calculate the Revenue minus Expenses for each given period and express this as unique entries.
+
+Thankfully, the bulk of the heavy lifting has already been completed in Step 4 and you can leverage this work to generate the Retained Earnings/Adjusted Income records.
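+
+In essence, you take the Revenue records and subtract the Expense records for each period. Here is a minimal sketch of that calculation. It assumes the Step 4 output is a model named `general_ledger_by_period` with `period_first_day`, `account_classification`, and `net_change` columns; adjust the names (and the debit/credit sign conventions) to match your project:
+
+```sql
+with income_statement_records as (
+
+    select * from {{ ref('general_ledger_by_period') }}
+    where account_classification in ('Revenue', 'Expense')
+
+)
+
+select
+    period_first_day,
+    -- give the generated records their own unique account name
+    'Net Income Adjustment' as account_name,
+    'Equity' as account_classification,
+    -- revenue minus expenses; flip the signs if your debit/credit
+    -- conventions store these amounts differently
+    sum(case when account_classification = 'Revenue' then net_change else 0 end)
+        - sum(case when account_classification = 'Expense' then net_change else 0 end)
+        as net_change
+from income_statement_records
+group by 1
+```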
+One thing to call out is that you will need to create unique field names for these records, as you are essentially generating new data. See the [quickbooks_retained_earnings](https://github.com/fivetran/dbt_quickbooks/blob/main/models/intermediate/int_quickbooks__retained_earnings.sql) model for how this was calculated.
+
+With that, you are finally at the last piece of aggregating the General Ledger by Period: combining the Retained Earnings/Adjusted Income model with the General Ledger by Period model.
+
+![](/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/captain-planet-combine.gif)
+
+## Step 6: Finish the General Ledger by Period Model
+
+Union the General Ledger by Period model with your Retained Earnings/Adjusted Income model. Just like that, you finished the hardest part of this equation! You now have a fully usable table that contains every single month across your financial history and can see the respective account entry for that month. Bonus points as well for not forgetting about the Retained Earnings/Adjusted Income entries that will be invaluable during the Balance Sheet calculations.
+
+Now you can finally move into the last steps and generate the Balance Sheet and Income Statement/Profit and Loss statements.
+
+![](/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/devil-prada-glacial-pace.gif)
+
+## Step 7: Generate the Balance Sheet Report
+
+To create the Balance Sheet, you can now simply reference your General Ledger by Period and filter for balance sheet account classifications. Bada Bing... you have your Balance Sheet and can slice and dice by period to your heart's content! ❤️ ⚖️
+
+Wow, that was easy. It can’t be that simple to generate the Income Statement/Profit and Loss report, right?
+
+![](/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/supranos-bada-bing.gif)
+
+## Step 8: Generate the Income Statement / Profit and Loss Report
+
+To create the Income Statement/Profit and Loss report, you can now simply reference your General Ledger by Period and filter for income statement account classifications. Bada Boom... you have your Income Statement/Profit and Loss report and can slice and dice by period to your heart's content! ❤️ 💸
+
+Geez, I wasn't kidding. It really is that easy!
+
+![](/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/supranos-boom.gif)
+
+## That’s a wrap!
+You just walked through a _quick_ 8-step process to take your financial data from raw to comprehensive financial statements. I have little doubt you are currently being thrown a calzone party by your entire Finance team for generating the perfect financial reports!
+
+![](/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/parks-and-rec-ben-wyatt.gif)
+
+This is by no means a simple process, but the silver lining is that countless other analysts have worked through it too. You now have the knowledge to go forth and wrangle your own financial data with the same principles other analysts have used. Additionally, with the advent of dbt packages you can directly leverage the past work of other analysts by using the pre-built financial models without any modifications on your end!
+
+It is important to note that each business may differ greatly from the next.
+The above principles may not translate exactly 1-to-1, but they can be slightly modified to fit your business use case. Additionally, the dbt packages may encounter a similar “one size does not fit all” scenario. That being said, the dbt packages are maintained by passionate individuals who are always excited and willing to grow the package. If you use a dbt package solution and notice your numbers do not tie out, I would encourage you to open an Issue and start a discussion with the maintainers and community. There may just be an update that can be applied to enhance the package and tie out your financial statements. The dbt packages are a great example of a community of analysts working together to develop pre-built data models for others to leverage.
+
+In the end, these outlined core principles and packages are intended to be leveraged by your present and future selves. I hope they have been helpful and I look forward to hearing your thoughts. Until next time, I’ll calc-you-later!
diff --git a/website/blog/2022-09-08-konmari-your-query-migration.md b/website/blog/2022-09-08-konmari-your-query-migration.md
new file mode 100644
index 00000000000..62e0f613a56
--- /dev/null
+++ b/website/blog/2022-09-08-konmari-your-query-migration.md
@@ -0,0 +1,133 @@
+---
+title: "KonMari your data: Planning a query migration using the Marie Kondo method"
+description: "Do your data models spark joy? Let’s talk about how to apply the KonMari Method to a new data migration project."
+slug: marie-kondo-query-migration
+authors: [lauren_benezra]
+tags: [analytics craft]
+hide_table_of_contents: false
+
+date: 2022-09-08
+is_featured: true
+---
+
+If you’ve ever heard of Marie Kondo, you’ll know she has an incredibly soothing and meditative method for tidying up physical spaces. Her KonMari Method is about categorizing, discarding unnecessary items, and building a sustainable system for keeping *stuff*.
+
+As an analytics engineer at your company, doesn’t that last sentence describe your job perfectly?! I like to think of the practice of analytics engineering as applying the KonMari Method to data modeling. Our goal as Analytics Engineers is not only to organize and clean up data, but to design a sustainable and scalable transformation project that is easy for downstream customers to navigate, grow, and consume.
+
+Let’s talk about how to apply the KonMari Method to a new migration project. Perhaps you’ve been tasked with unpacking the kitchen in your new house; AKA, you’re the engineer hired to move your legacy SQL queries into dbt and get everything working smoothly. That might mean you’re grabbing a query that is 1500 lines of SQL and reworking it into modular pieces. When you’re finished, you have a performant, scalable, easy-to-navigate data flow. That does take a bit of planning, but you’ll see that we can take this…
+
+![buried in boxes](/img/blog/2022-09-08-konmari-your-query-migration/buried-in-boxes.webp)
+
+to THIS!
+
+![cat perched in kitchen](/img/blog/2022-09-08-konmari-your-query-migration/cat_kitchen.jpeg)
+
+That’s the power of the KonMari Method. Let’s apply the method specifically to data:
+
+**KonMari Method**
+
+1. Commit yourself and stakeholders to tidying up this project
+2. Imagine the ideal state of this query
+3. Finish discarding unnecessary models and columns
+4. Tidy by category
+5. Follow the right order—upstream to downstream
+6. Validate that the result *sparks joy*, AKA, satisfies all of the consumers’ needs
+
+Are you ready to tidy?! Summon Marie Kondo!
+
+![Marie Kondo](/img/blog/2022-09-08-konmari-your-query-migration/mariekondo.gif)
+
+Think about when you moved to a new house. Maybe, at some point during the packing process, you got annoyed and just started labeling everything as “kitchen stuff”, rather than what was actually put in the boxes. (Isn’t this…everyone?!) So now, in your new kitchen, you’ve got tons of boxes labeled “kitchen stuff” and you don’t know where everything goes, or how to organize everything. You start to unpack, and your housemates come into the kitchen and ask: why is the Tupperware above the fridge? And why are the cooking utensils in the drawer furthest from the stove?
+
+![nachka-cat.gif](/img/blog/2022-09-08-konmari-your-query-migration/nachka-cat.gif)
+
+Before you build, you need to plan. And before you plan, you need to get everyone on the same page to understand how they use the kitchen, so you can organize a kitchen that makes sense to the people who use it. So let’s jump into step one.
+
+## Step 1: Commit yourself and stakeholders to tidying up this project
+
+This may feel like an unnecessary step, but haven’t you ever started migrating a new query, only to find out that it was no longer being used? Or people found it so difficult to consume that they instead created their own queries? Or you carved out precious time for this project, but the people you need involved have not? Or maybe your consumers expected you to have completed this project yesterday...*Initiate anxiety-stomachache now*.
+
+Take the opportunity to meet with your stakeholders, and get everyone on the same page. These are likely your report-readers and your report-builders.
+
+Your *readers* are the stakeholders who are not the boots-on-the-ground engineers or analysts, but rather the people who rely on the output of the engineering and analysis — Head of Marketing, Head of Sales, etc. — these are your housemates who come into the kitchen searching for a fork to eat the dinner prepared for them.
+
+The *builders* are the post-dbt data analysts — they transform your thoughtfully-curated tables into beautiful analysis dashboards and reports to answer the readers’ questions — Marketing Analyst, Tableau Guru, Looker Developer — these are your housemates who use your meticulously organized kitchen to cook delicious meals.
+
+You might be thinking, why would I bother the report-reader when I have the report-builder? Remember, your reader needs to know where the forks live. In this step, it is crucial to set up an *initial* meeting with all of these people to make sure you’re on the same page about what is being built and why. Then you’ll be able to find one person in the group who can be your phone-a-friend for context questions.
+
+Here are some example questions you’ll want to ask in this initial meeting:
+
+- How is this data table currently being utilized?
+- What transformations are being performed *on top* of this table? Aggregations? Filters? Adjustments? Joins?
+- What are the pain points you face with this table? Slow to query? Incorrect outputs? Missing columns? Unnecessary columns?
+- What questions do you want this table to answer? Can those questions be broken apart? i.e., Can this table be broken up into smaller tables, each of which answers a different part of the question? Or is it best as one table?
+- How can we bucket these sources? Think consumption — where are these subqueries going to be consumed downstream? Do these sources make sense to join upstream?
+
+- If the original table output is incorrect, do they have a table with correct data that we can validate against? How will we know if it is correct?
+
+## Step 2: Imagine the ideal state of your project
+
+This is my favorite part. If you dive into all the boxes labeled “kitchen stuff” with no plan, you’ll end up moving things around multiple times until it feels right. Sometimes, you won’t even get to a place where it feels right before your housemates jumble everything up, because they use the kitchen differently than you. You need the kitchen to flow with the way that you *and* your housemates use the kitchen — if you know that the silverware goes in the drawer closest to the dishwasher, and the cups and glasses go in the cabinet next to the sink, and the mugs go above the coffee pot, you’ll unpack once and everyone will be able to navigate the kitchen with ease.
+
+Let’s plan how to unpack our query. This may be what you’re working with: 30+ sources all packed into one SUPER query 🦸.
+
+![many-to-one DAG](/img/blog/2022-09-08-konmari-your-query-migration/many-to-one-dag.png)
+
+Or, perhaps you’re migrating a stored procedure, and you have DAG Spaghetti that you’re contending with, as [Matt talks through in this article](/blog/migrating-from-stored-procs).
+
+![spaghetti data DAG](/img/blog/2022-09-08-konmari-your-query-migration/spaghetti-data-dag.png)
+
+Now we can look at the details of this code, and start to categorize. You can start building out what this may look like as a DAG in a process mapping tool, like [Whimsical](https://whimsical.com/).
+
+Where can you break a massive query apart, and pull pieces out into modular models? Or, where can you combine repeated code to answer a more general question?
+
+- Use the buckets identified in your initial meetings with clients to identify where you can create re-usable intermediate models.
+- Locate repeated joins and subqueries to identify more intermediate models.
+- Figure out which sources aren’t really providing answers to the questions, and remove them from your design.
+
+Perhaps your redesigned DAG looks something like this — you have intermediate models and joins carved out, creating modular, reusable pieces of code ([read more on that here!](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/)). You’ve created a data flow devoid of circular logic, and your end-table has all the necessary components to answer your stakeholders’ questions.
+
+*Before you accuse me of wishful thinking, this is the result of a real client project! We broke up almost 1500 lines of code in a single query into this beautiful waterfall. Marie Kondo would be proud.*
+
+![fully konmarid project](/img/blog/2022-09-08-konmari-your-query-migration/fully-konmarid-project.png)
+
+While you don’t *have to* design your flow this way, it is incredibly important to consider modularity, readability, scalability, and performance in your design. Design with intention! Remember, don’t put your forks too far from the dishwasher.
+
+## Step 3: Finish discarding unnecessary models and columns
+
+As you’re pulling items out of your “kitchen stuff” boxes, you may discover that you have Tupperware bottoms without lids, broken dishes, and eight cake pans. Who needs eight cake pans?! No one. There’s some cleaning out you can do with your kitchen stuff, as well as your data models.
+
+Now that you have your design and your notes from your stakeholder meeting, you can start going through your query and removing all the unnecessary pieces.
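+
+For instance, a common cleanup opportunity is an import CTE that drags every column downstream and defers its filtering to the final select. A minimal before-and-after sketch, with hypothetical model and column names:
+
+```sql
+-- before: the import CTE pulls every column and row downstream
+-- with orders as (select * from {{ ref('stg_orders') }})
+
+-- after: import only the columns you need, and filter early
+with orders as (
+
+    select
+        order_id,
+        customer_id,
+        ordered_at,
+        amount
+    from {{ ref('stg_orders') }}
+    where status != 'canceled'  -- truncate rows before any joins, not in the final select
+
+)
+
+select * from orders
+```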
+
+Here are a few things to look for:
+
+- Get rid of unused sources — [there’s a package for that!](https://hub.getdbt.com/dbt-labs/dbt_project_evaluator/latest/)
+- Remove columns that are being brought in with import CTEs, but are just clogging your query
+- Only bring in the columns you need (this is especially true for BigQuery and Redshift for performance purposes)
+- Where you can, do the same with rows! Is a filter being applied in the final query that could be moved to a CTE, or maybe even an intermediate model?
+- Remember that in most cases, it is more performant to filter and truncate the data before the joins take place
+
+## Steps 4 & 5: Tidy by category and follow the right order—upstream to downstream
+
+We are ready to unpack our kitchen. Use your design as a guideline for [modularization](/guides/best-practices/how-we-structure/1-guide-overview).
+
+- Build your staging tables first, and then your intermediate tables in your pre-planned buckets.
+- Important, reusable joins that are performed in the final query should be moved upstream into their own modular models, as well as any joins that are repeated in your query.
+- Remember that you don’t want to make these intermediate tables *too* specific. Don’t apply filters if it causes the model to be consumable by only one query downstream. If you do this, you aren’t creating a scalable project; you’re just recreating the same issue as your original query, but spread amongst multiple models, which will be hard to untangle later.
+
+Your final query should be concretely defined — is it a fact or dimension table? Is it a report table? What are the stepping stones to get there? What’s the most performant way to [materialize](/docs/building-a-dbt-project/building-models/materializations)?
+
+Build with the goal to scale — when might you need these intermediate models again? Will you need to repeat the same joins? Hopefully you’ve designed with enough intention to know the answer to that last one is “no.”
+
+## Step 6: Validate that the result *sparks joy*, AKA, satisfies all of the consumers’ needs
+
+When you walk into your newly unpacked kitchen, and the counters are organized, you can unload the dishwasher because the location of the forks is intuitive. You ask your housemate to make dinner for everyone, and they navigate the kitchen with ease!
+
+Ask yourself these questions:
+
+- Does my finished build design spark joy? Meaning, have I executed my build to reflect my scalable design?
+- Is it easy to navigate? Is it easy to understand?
+- Are all of the pieces easy to consume, when I need to utilize the modularity in the future?
+- Does my final query perform well, and answer all of the consumers’ needs?
+
+If your answer is yes to these questions, you’ve sparked JOY. Well done, friend! If the answer is no, consider which pieces need to be planned again. If your code isn’t scalable, or easy for consumers to use, start from step one again — gather your consumers, try to understand where communication broke down, and redesign.
+
diff --git a/website/blog/2022-09-13-the-case-against-cherry-picking.md b/website/blog/2022-09-13-the-case-against-cherry-picking.md
new file mode 100644
index 00000000000..84a70e08392
--- /dev/null
+++ b/website/blog/2022-09-13-the-case-against-cherry-picking.md
@@ -0,0 +1,180 @@
+---
+title: "The case against `git cherry pick`: Recommended branching strategy for multi-environment dbt projects"
+description: "This is...a cherry picking trashpost."
+slug: the-case-against-git-cherry-picking
+authors: [grace_goheen]
+tags: [analytics craft]
+hide_table_of_contents: false
+
+date: 2022-09-13
+is_featured: true
+---
+
+## Why do people cherry pick into upper branches?
+
+The simplest branching strategy for making code changes to your dbt project repository is to have a **single main branch** with your production-level code. To update the `main` branch, a developer will:
+
+1. Create a new feature branch directly from the `main` branch
+1. Make changes on said feature branch
+1. Test locally
+1. When ready, open a pull request to merge their changes back into the `main` branch
+
+![Basic git workflow](/img/blog/2022-09-13-the-case-against-cherry-picking/1_basic_git_workflow.png)
+
+If you are just getting started in dbt and deciding which branching strategy to use, this approach—often referred to as “continuous deployment” or “direct promotion”—is the way to go. It provides many benefits, including:
+
+- Fast promotion process to get new changes into production
+- Simple branching strategy to manage
+
+The main risk, however, is that your `main` branch can become susceptible to bugs that slip through the pull request approval process. In order to have more intensive testing and QA before merging code changes into production, some organizations may decide to **create one or more branches between the feature branches and `main`**.
+
+
+
+:::caution Don't over-engineer your branching strategy
+Before adding additional primary branches, ask yourself: “is this risk *really* worth adding complexity to my developers’ workflow?” Most of the time, the answer is no. Organizations that use a simple, single-main-branch strategy are (almost always) more successful long term. This article is for those who *really absolutely must* use a multi-environment dbt project.
+:::
+
+For example, a single dbt project repository might have a hierarchy of 3 primary branches: `dev`, `staging`, and `prod`. To update the `prod` branch, a developer will:
+
+1. Create a new feature branch directly from the `dev` branch
+2. Make changes on that feature branch
+3. Test locally
+4. When ready, open a pull request to merge their changes back into the `dev` branch
+
+In this hierarchical promotion, once a set of feature branches has been vetted in `dev`:
+
+5. The entire `dev` branch is merged into the `staging` branch
+
+After a final review of the `staging` branch:
+
+6. The entire `staging` branch is merged into the `prod` branch
+
+![Multi-environment workflow](/img/blog/2022-09-13-the-case-against-cherry-picking/2_multienvironment.png)
+
+While this approach—often referred to as “continuous delivery” or “indirect promotion”—is more complex, it allows for a higher level of protection for your production code. You can think of these additional branches as layers of protective armor. The more layers you have, the harder it will be to move quickly and nimbly on the battlefield, but you’ll also be less likely to sustain injuries. If you’ve ever played D&D, you’ll understand this tradeoff.
+
+Because these additional branches slow down your development workflow, organizations may be tempted to add *more complexity* to increase their speed—selecting individual changes to merge into upper branches (in our example, `staging` and `prod`), rather than waiting to promote an entire branch. That’s right, I’m talking about the beast that is **cherry picking into upper branches**.
+
+:::info What is cherry picking?
+
+`git cherry-pick` is a git command that allows you to apply individual commits from one branch onto another branch.
+
+:::
+
+In theory, cherry picking seems like a good solution: it allows you to select individual changes for promotion into upper branches to unblock developers and increase speed.
+
+In practice, however, when cherry picking is used this way, it introduces more risk and complexity and (in my opinion) is not worth the tradeoff. Cherry picking into upper environments can lead to:
+
+- Greater risk of breaking the hierarchical relationship of primary branches
+- Flawed testing practices that don’t account for dependent code changes
+- Increased chance of merge conflicts, which drain developer time and invite human error
+
+![Gru meets cherry picking](/img/blog/2022-09-13-the-case-against-cherry-picking/3_gru.jpg)
+
+## If you’re not testing changes independently, you shouldn’t promote them independently
+
+If you’ve attempted a branching strategy that involves cherry picking into upper environments, you’ve probably encountered a scenario like this, where feature branches are only tested in combination with others:
+
+![Feature branches tested in combination with others](/img/blog/2022-09-13-the-case-against-cherry-picking/4_scenario_1.png)
+
+1. Alex wants to make a code change, so they create a new branch from `dev` called `feature_alex`
+1. Becca has a different code change she’s working on, so she creates a new branch from `dev` called `feature_becca`
+1. Alex’s changes are approved, so they merge `feature_alex` into `dev`.
+1. Becca’s changes are approved, so she merges `feature_becca` into `dev`.
+1. Carol is working on something else, so she creates a new branch from `dev` called `feature_carol`.
+1. Carol’s changes are approved, so she merges `feature_carol` into `dev`.
+1. The testing team notices an issue with Carol’s new addition to `dev`.
+1. Alex and Becca’s changes are urgent and need to be promoted soon; they can’t wait for Carol to fix her work. Alex and Becca cherry-pick their changes from `dev` into `staging`.
+1. During final checks, the team notices an issue with Alex’s changes in `staging`.
+1. Becca is adamant that her changes need to be promoted to production immediately. She can’t wait for Alex to fix their work. Becca cherry-picks her changes from `staging` into `prod`.
+
+**What’s the problem?**
+
+In the example above, the team has only ever tested `feature_becca` *in combination with* `feature_alex`—so there’s no guarantee that `feature_becca`’s changes will be successful on their own. What if `feature_becca` was relying on a change included in `feature_alex`? Because testing of branches is not conducted independently, it’s risky to merge independently.
+
+## Feature branches contain more than meets the eye
+
+Let’s imagine another version of the story, where Carol’s changes are the only ones that are ultimately merged into `prod`:
+
+![Feature branches contain more than meets the eye...](/img/blog/2022-09-13-the-case-against-cherry-picking/5_scenario_2.png)
+
+1. Alex wants to make a code change, so they create a new branch from `dev` called `feature_alex`.
+1. Becca has a different code change she’s working on, so she creates a new branch from `dev` called `feature_becca`.
+1. Alex’s changes are approved, so they merge `feature_alex` into `dev`.
+1. Becca’s changes are approved, so she merges `feature_becca` into `dev`.
+1. Carol is working on something else, so she creates a new branch from `dev` called `feature_carol`.
+1. Carol’s changes are approved, so she merges `feature_carol` into `dev`.
+1. The testing team approves the entire `dev` branch.
+1. `dev` is merged into `staging`.
+1. During final checks, the team notices an issue with Alex and Becca’s changes in `staging`.
+1. Carol is adamant that her changes need to be promoted to production immediately. She can’t wait for Alex or Becca to fix their work. Carol cherry-picks her changes from `staging` into `prod`.
+
+**What’s the difference?**
+
+Because `feature_carol` was created *after* `feature_alex` and `feature_becca` were already merged back into `dev`, `feature_carol` is *dependent* on the changes made in the other two branches. `feature_carol` not only contains its own changes, it *also* carries the changes from `feature_alex` and `feature_becca`. Even if Carol recognizes this and only cherry-picks the *individual commits* from `feature_carol`, she’s still not in the clear because of the previously mentioned testing dependency. `feature_carol`’s commits have only ever been tested *in combination with* `feature_alex` and `feature_becca`.
+
+## Repeated merge conflicts drain development time
+
+In order to avoid this dependency issue, your team might have the idea to create feature branches directly from `prod` (instead of `dev`). If we imagine the previous scenario with this alteration, however, we can easily see why this doesn’t work either:
+
+![Development branches made from prod](/img/blog/2022-09-13-the-case-against-cherry-picking/6_scenario_3.png)
+
+1. Alex wants to make a code change, so they create a new branch from `prod` called `feature_alex`.
+1. Becca has a different code change she’s working on, so she creates a new branch from `prod` called `feature_becca`.
+1. Alex’s changes are approved, so they merge `feature_alex` into `dev`.
+1. Becca’s changes are approved, so she merges `feature_becca` into `dev`.
+1. Carol is working on something else, so she creates a new branch from `prod` called `feature_carol`.
+1. Carol’s changes are approved, so she merges `feature_carol` into `dev`.
+1. The testing team approves the entire `dev` branch.
+1. `dev` is merged into `staging`.
+1. During final checks, the team notices an issue with Alex and Becca’s changes in `staging`.
+1. Carol is adamant that her changes need to be promoted to production immediately. She can’t wait for Alex or Becca to fix their work. Carol cherry-picks her changes from `staging` into `prod`.
+
+Now, `feature_carol` only contains its individual changes—the team can merge her branch independently into `dev`, `staging`, and ultimately `prod` without worrying about accidentally pulling along the changes from the other two branches.
+
+**What’s the problem?**
+
+A new issue emerges, however, if `feature_alex` or `feature_becca` alter the *same lines of code* as `feature_carol`. When `feature_carol` is merged into each of the primary branches, Carol will have to resolve merge conflicts every time in the exact same way to ensure the hierarchy of the branches remains consistent. This takes time and is prone to human error.
+
+## What to do instead: The recommended branching strategy for multi-environment dbt projects
+
+![Bernie is begging you to stop cherry picking](/img/blog/2022-09-13-the-case-against-cherry-picking/7_bernie.jpg)
+
+In the end, **cherry picking into upper branches** is a branching strategy that causes more trouble than it’s worth.
+
+Instead, if you decide to use a branching strategy that involves multiple primary branches (such as `dev`, `staging`, and `prod`):
+
+- Protect your `dev` branch with a dbt Cloud [CI job](https://docs.getdbt.com/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration)
+- Ensure thorough code reviews (check out our recommended [PR template](https://docs.getdbt.com/blog/analytics-pull-request-template))
+- Only promote primary branches hierarchically into one another
+
+![Hierarchical branching strategy](/img/blog/2022-09-13-the-case-against-cherry-picking/2_multienvironment.png)
+
+If issues arise during testing on the `dev` or `staging` branch, the developers should create additional branches as necessary to fix the bugs until the *entire* branch is ready to be promoted.
+
+As mentioned previously, this approach does have a clear disadvantage—it might take longer to fix all of the bugs found during testing, which can lead to:
+
+- Delayed deployments
+- Code freezes on `dev`, creating a backup of outdated feature branches waiting to be merged
+
+Thankfully, we can mitigate these delays by doing rigorous testing on the *individual* feature branches, ensuring the team is extremely confident about the change *prior* to merging the feature branch into `dev`.
+
+Additionally, developers may supplement the above workflow by creating hotfixes to quickly resolve bugs in upper environments.
+
+:::info What is a hotfix?
+
+A `hotfix` is a branch that is created to quickly patch a bug, typically in your production code. If a high-stakes bug was discovered in `prod`, a hotfix branch would be created from `prod`, then merged into `prod` as well as *all subordinate branches* (`dev` and `staging`) once the change has been approved. Similarly, if a high-stakes bug were discovered in `staging`, a hotfix branch would be created from `staging`, then merged into `staging` as well as *all subordinate branches* (`dev`) once the change has been approved. This allows you to fix a bug in an upper environment without having to wait for the next full promotion cycle, but also ensures the hierarchy of your primary branches is not lost.
+
+:::
+
+Even with its challenges, hierarchical branch promotion is the recommended branching strategy when handling multiple primary branches because it:
+
+- Simplifies your development process: Your team runs more efficiently with simpler rules to follow
+- Prevents merge conflicts: You save developer time by avoiding developers having to manually resolve sticky merge conflicts over and over
+- Ensures the code that's tested is the code that is ultimately merged into production: You avoid crisis scenarios where unexpected bugs sneak into production
+
+Now I’ll admit it: this blog post was mostly just a venting session, providing me a cathartic outlet to rage against cherry picking (my Slack DMs are open if you want to see all of the memes that *didn’t* make it into this post).
+
+And you may be left thinking… ok, jeez Grace, I won’t cherry pick into upper branches. But how do I *actually* set up my dbt project to properly use hierarchical branch promotion?
+
+Don’t worry, a guide and training course are on the way ;)
diff --git a/website/blog/2022-09-28-analyst-to-ae.md b/website/blog/2022-09-28-analyst-to-ae.md
new file mode 100644
index 00000000000..b89638520d9
--- /dev/null
+++ b/website/blog/2022-09-28-analyst-to-ae.md
@@ -0,0 +1,163 @@
+---
+title: "Analysts make the best analytics engineers"
+description: "You often hear of the disgruntled analyst becoming the analytics engineer...and loving it? In this article, Brittany walks through what makes analysts ripe for analytics engineering."
+slug: analysts-make-the-best-aes
+authors: [brittany_krauth]
+tags: [analytics craft]
+hide_table_of_contents: false
+
+date: 2022-09-29
+is_featured: true
+---
+
+When you were in grade school, did you ever play the “Telephone Game”? The first person would whisper a word to the second person, who would then whisper that word to the third person, and so on and so on. At the end of the line, the final person would loudly announce the word that they heard, and alas! It would have morphed into a new word, completely unrecognizable from the original. That’s how life feels without an analytics engineer on your team.
+
+So let’s say that you have a business question, you have the raw data in your data warehouse, and you’ve got dbt up and running. You’re in the perfect position to get this curated dataset completed quickly! Or are you?
+
+
+
+You’ve got three steps that stand between you and your finished curated dataset. *If you don’t have an analytics engineer*, then the work may be split up like this:
+
+![The in-series development workflow](/img/blog/2022-09-28-analyst-to-ae/old_workflow.png)
+
+The first step is that your analyst will start researching the stakeholder’s request and will try to boil it down to a high-impact question that will drive the business. What do they really want to know? For example, your stakeholder might come to you with the following:
+
+***“We need to be able to track usage of our product, and we’d like to have some data around Active Users.”***
+
+Analysts are experts at taking broad statements and turning them into specific pieces of work.
+
+“Some data” could mean:
+
+1. A single KPI with a trendline over time
+1. A dashboard broken out into various categories
+1. A filterable and queryable table for ad-hoc analysis
+
+“Active Users” could mean:
+
+1. Users that logged in during a fixed time period
+1. Users with a session longer than a certain amount of time
+1. Users that interacted with a particular feature
+
+From there, the analyst will build out the initial documentation and write down what they want the final dataset to look like. If your analyst is not trained as an analytics engineer, this is the point at which they will need to hand the project over to a data engineer to build the [model](https://docs.getdbt.com/docs/building-a-dbt-project/building-models).
+
+The first time that a team of analysts and data engineers builds a curated dataset, they will often expect this process to be a straight line to completion.
+Expectations are that the process will look something like this:
+
+![The expectation](/img/blog/2022-09-28-analyst-to-ae/first_loop.png)
+
+What quickly becomes apparent is that this push for a linear process often ends up creating three unanticipated “loops”:
+
+![The reality](/img/blog/2022-09-28-analyst-to-ae/multiple_loops.png)
+
+## Loop 1: Reacting to findings in the raw data
+
+Let’s say that your analyst has defined the business need as an `Active Users` dataset with “Unique users that log in during a given day”. The analyst will try to do as much discovery work up-front as possible, because it’s hard to predict exactly what you’ll find in the raw data. When a data engineer gets stuck writing a model, they’ll need to reach out to the analyst for some additional discovery. When an analyst-turned-analytics-engineer has a question while writing a model, they don’t need to wait to talk to anyone, and they can begin researching it immediately. Which brings us to our first point:

+**Analysts already know the data that they want.**

+
+If the raw `Login` dataset below contains two different date fields (`Login_Date` and `Session_Date`), a data engineer would be stuck. They can’t just guess, because using the wrong date field will create an entirely different metric! So they must go back to the analyst for clarity on which field to use. We’ve just had a full loop with two hand-offs, and the data engineer hasn’t even begun building the model yet.
+
+| Login_Date | Session_Date | User_Id |
+|------------|--------------|---------|
+| 2022-08-01 | 2022-08-01 | 123 |
+| 2022-08-01 | 2022-08-03 | 123 |
+| 2022-08-04 | 2022-08-04 | 975 |
+| 2022-08-04 | 2022-08-04 | NULL |
+
+In comparison, your new analytics engineer could quickly pivot based on the findings in the raw data. They can skip the endless loops of “discovery”, because they can react in the moment. When they see the two date fields for `Login_Date` and `Session_Date`, they can instantly apply their product knowledge, compare against examples, or reach out to their business contacts to diagnose the difference and select the correct date for the model.
+
+If the business need includes looking at `Active Users` by `Groups A, B, and C`, then that adds an extra layer of complexity. Without an analytics engineer, you’ll see extra loops and hand-offs to finalize all the business logic, how to handle NULL values, and even just the final format.
+
+**Model format returned by data engineering**
+
+| Date | Active Users Group A | Active Users Group B | Active Users Group C |
+|------------|----------------------|----------------------|----------------------|
+| 2022-08-01 | 34 | 60 | 61 |
+| 2022-08-02 | 77 | 86 | 37 |
+| 2022-08-03 | 71 | 9 | 6 |
+| 2022-08-04 | 63 | 87 | 10 |
+
+**Model format needed for the BI tool**
+
+| Date | Group | Active Users |
+|------------|---------|--------------------|
+| 2022-08-01 | Group A | 34 |
+| 2022-08-01 | Group B | 60 |
+| 2022-08-01 | Group C | 61 |
+| 2022-08-02 | Group A | 77 |
+| 2022-08-02 | Group B | 86 |
+| 2022-08-02 | Group C | 37 |
+
+## Loop 2: Reacting to unexpected results
+
+Analysts are the first (and sometimes only) line of defense to catch data quality issues. Once a dataset is aggregated down to a single number to answer a business question, it’s often impossible to know if there’s an incorrect filter or wrong set of logic.
+
+When a model is built for the first time, all parties often “don’t know what they don’t know”. As the analyst digs into the curated dataset, they will find updates that need to be made to the model. Hence, our second point:

+**Analysts question everything.**

+
+`Group A` for `Active Users` may be focused on `Participant` roles. The stakeholder had explained to the analyst that they want to exclude any `Admins` who will have a different experience in the product.
+
+| User_Id | Location | Role | Level | Zone |
+|---------|------------|-------------|-------|------|
+| 123 | California | Editor | AAA | 1 |
+| 427 | Utah | Participant | ABA | 1 |
+| 864 | Georgia | Admin | CCC | 3 |
+
+A data engineer working off of a “build list” will add a filter for `WHERE Role = 'Participant'`. During the data validation step, the analyst would discover that there is actually a third `Role` of `Editor` that no one was aware of. This would create a loop where the data engineer would have to edit the model to replace it with `WHERE Role != 'Admin'`.
+
+The analyst caught the issue because they have the appropriate context to validate the dataset. Analysts work directly with the stakeholder, so they’ll also be more familiar with company-specific and department-specific acronyms.
+
+**As we all know, data validation is an art, not a science.** Analysts will employ everything from a “sniff test” (view a random sample of rows) to individual examples (one-to-one exact matching to another system). An analyst will have to use their experience to know when the dataset is “good enough” for the stakeholder and their question, since 100% accuracy might not be the goal. And if we're being honest, sometimes being directionally correct is all that’s needed to make a business decision.
+
+An analyst is able to identify which areas do *not* need to be 100% accurate, which means they can also identify which areas *do* need to be 100% accurate.
+
+> dbt makes it very quick to add [data quality tests](/docs/build/tests). In fact, it’s so quick that it’ll take an analyst longer to write up what tests they want than it would take for an analyst to completely finish coding them.
+
+When data quality issues are identified by the business, we often see that analysts are the first ones to be asked:
+
+- Why are over half of the `User_Ids` now showing as NULL?
+- Why is this chart showing a User’s `Location` that’s not in the USA?
+- Why is the dashboard filter saying that all Users’ `Zones` are Zone 2?
+
+This familiarity with what types of data quality issues are the most important to the business means that an analyst can often identify in advance which automated tests should be added to a data model.
+
+## Loop 3: Reacting to mismatched documentation
+
+There’s nothing worse than coming back to view a curated dataset months later (or maybe after someone on the team has offboarded) and learning that there is nothing written to explain why certain logic exists. Or worse, the documentation exists but it no longer matches what the model actually does. Which brings us to our third and final point:

+**Analysts understand the pain of a poorly documented dataset.**

+
+It’s much easier to keep to a naming guide when the writer has a deep understanding of the data and how it’s referenced by the business. Analysts are often asked repeat questions by stakeholders, which is extra incentive to create great documentation from the beginning.
+
+> A data engineer will document a dataset based on what they needed to know to build it. An analyst with analytics engineering skills will document a dataset based on *how to use it going forward.*
+
+If we want to know how certain logic was built technically, then we can reference the SQL code in dbt docs. If we want to know *why* certain logic was built into that specific model, then that’s where we’d turn to the documentation.
+
+- Example of not-so-helpful documentation ([dbt docs can](https://docs.getdbt.com/docs/building-a-dbt-project/documentation) build this dynamically):
+    - `Case when Zone = 1 and Level like 'A%' then 'True' else 'False' end as GroupB`
+- Example of better, more descriptive documentation (add to your dbt markdown file or column descriptions):
+    - Group B is defined as Users in Zone 1 with a Level beginning with the letter 'A'. These users are accessing our new add-on product that began in Beta in August 2022. It's recommended to filter them out of the main Active Users metric.
+
+In the second example, the documentation is written with the stakeholder and business in mind, and analysts are experts at this translation of the technical to the non-technical.
+
+## Are you convinced?
+
+Teach your analyst to be an analytics engineer, so they can do their own model building for curated datasets. You’ll start to see their process evolve into multiple smaller iterations, as they add fields and test them as they build. No loops, no hand-offs, and no Telephone Game. Your new workflow will look a little like this:
+
+![Parallel development](/img/blog/2022-09-28-analyst-to-ae/new_workflow.png)
+
+Instead of trying to define all the various groups of `Active Users` at once, an analytics engineer can be validating the `Group A` rows, while adding `Group B` to their local environment, while still working with the stakeholder on the definition of `Group C`.
+
+**All this to say: Your analyst turned analytics engineer is the key to unlocking fast development of curated datasets with next-level data quality and documentation.**
+
+### How to empower your analysts to level up their skillset
+
+Here are some next steps to get you started on converting your analysts to analytics engineers:
+
+1. Many analysts are already very comfortable querying data using SQL Select statements. For those that aren’t, challenge them to start pulling data for ad-hoc requests using SQL. Ask them to compare some of the common transformations in your BI tool to SQL functions or rebuild them using CTEs. This will prepare them for learning about dbt SQL models.
+1. Start incorporating peer review as part of the dashboard publishing process. Also, consider how you set up your dashboard environments (do you have a local development area, a review area, and a published area?). This will prepare them for learning about Git, development environments, and version control.
+1. Talk to your analyst about how they decide to create alerts in your BI tool, or any regular reviews that they do of existing dashboards for data accuracy. Also discuss the current data governance practices that each dashboard follows (Data Dictionary? Style Guide?). This will prepare them for learning about the dbt `.yml` file.
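+
+To make that last item concrete, here is a minimal sketch of what a few of those automated checks might look like in a dbt `.yml` file. The model and column names are hypothetical, borrowed from the earlier examples:
+
+```yaml
+version: 2
+
+models:
+  - name: active_users        # hypothetical model name
+    columns:
+      - name: user_id
+        tests:
+          - not_null          # catches "why are over half of the User_Ids showing as NULL?"
+      - name: zone
+        tests:
+          - accepted_values:  # flags unexpected Zone values
+              values: [1, 2, 3]
+              quote: false
+```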
+
+Learn more on how to apply a fresh framework to existing analytics projects to upskill your ***Analyst to an Analytics Engineer*** at my [Coalesce 2022 presentation](https://coalesce.getdbt.com/agenda/analyst-to-analytics-engineer). I hope to see you there!
+
+
diff --git a/website/blog/2022-10-12-how-to-design-and-structure-metrics.md b/website/blog/2022-10-12-how-to-design-and-structure-metrics.md
new file mode 100644
index 00000000000..c2f58011cfe
--- /dev/null
+++ b/website/blog/2022-10-12-how-to-design-and-structure-metrics.md
@@ -0,0 +1,394 @@
+---
+title: "How to design and structure dbt metrics: Recommendations for getting started"
+description: "The introduction of the dbt Semantic Layer expands what users can do with dbt but introduces familiar questions around where logic should live. Read along as the dbt Labs team talks about best practices through the lens of two different examples!"
+slug: how-to-design-and-structure-metrics
+
+authors: [callum_mccann]
+
+tags: [dbt product updates]
+hide_table_of_contents: false
+
+date: 2022-10-12
+is_featured: true
+---
+
+---
+
+**IMPORTANT:** This document serves as the temporary location for information on how to design and structure your metrics. It is our intention to take this content and turn it into a Guide, like [How we structure our dbt projects](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview), but we feel that codifying information in a Guide first requires that metrics be rigorously tested by the community so that best practices can arise. This document contains our early attempts to create best practices. In other words, read these as suggestions for a new paradigm and share in the community where they do (or don’t) match your experiences! You can find more information on where to do this at the end.
+
+---
+
+## The power of a semantic layer on top of a mature data modeling framework
+
+As a longtime [dbt Community](https://www.getdbt.com/community/join-the-community/) member, I knew I had to get involved when I first saw the dbt Semantic Layer in the now infamous [`dbt should know about metrics` GitHub Issue](https://github.com/dbt-labs/dbt-core/issues/4071). It gave me a vision of a world where metrics and business logic were unified across an entire organization; a world where the data team was no longer bound to a single consuming experience and could enable their stakeholders in dozens of different ways. To me, it felt like the opportunity to contribute to the next step of what dbt could become.
+
+In past roles, I’ve been referred to as the `dbt zealot` and I’ll gladly own that title! It’s not a surprise - dbt was built to help data practitioners expand the power of our work with software engineering principles. It gave us flexibility and power to serve our organizations. But I always wondered if there were more folks who could directly benefit from interacting with dbt.
+
+The Semantic Layer expands the reach of dbt **by coupling dbt’s mature data modeling framework with semantic definitions.** The result is a first-of-its-kind data experience that serves both the data practitioners writing your analytics code and the stakeholders who depend on it. Metrics are the first step towards this vision, allowing users to version control and centrally define their key business metrics in a single repo while also serving them to the entire business.
+
+However, this is still a relatively new part of the dbt toolbox and you probably have a lot of questions on how exactly you can do that.
+This blog contains our early best practice recommendations for metrics in two key areas:
+- **Design**: What logic goes into metrics and how to use calculations, filters, dimensions, etc.
+- **Structure**: Where these metrics will live in your dbt project and how to compose the files that contain your metrics
+
+We developed these recommendations by combining the overall philosophy of dbt with our hands-on learning gathered during the beta period and internal testing.
+
+
+
+**Pre-reading:** We recommend reading through the [metrics documentation](/docs/building-a-dbt-project/metrics), which contains a table of all the required/optional properties.
+
+### When to put business logic in the semantic layer vs the modeling layer
+
+Our instinct when designing metrics might be to encode as much information as possible into the semantic layer. An example of this is case statements - the analytics engineer’s gut instinct might be to mimic tools of the past and provide complicated case statements for the metric `expression` property to try and capture the nuance of how it should be calculated.
+
+But remember - you always have the option of performing this logic _in the modeling layer_. This is the key difference between dbt and other semantic layer offerings - by sitting the semantic layer atop a mature transformation layer, you always have the option to configure and optimize your logic within your models and then _define semantic components with intentionality_.
+
+Getting the balance just right is a learning experience and developing community best practices and standards will take time, which is why it’s important for us to think from first principles. What should really be our goal when determining whether logic lives in a model or a metric?
+
+To explore this question and begin to develop an intuition, we’ll walk through two examples of handling this divide.
+
+## Basic example: Revenue
+
+### Designing your metrics
+
+In this example, we’ll cover the basics of defining a metric and a fairly straightforward example of where users can draw the line between the semantic layer and the modeling layer. You should finish this section with a better understanding of dbt metrics and their relationship to the modeling layer.
+
+In the past, the `marts` tables were often your end-stage layer before data was consumed in another tool or system. Now, the mart is the springboard for the creation of our metric. So we'll begin by looking at our end-state `marts` model called `order_events`, which looks something like the below table, but on the order of millions of rows instead of five. Our finance team uses the below model to better understand revenue, but inconsistencies in how it's reported have led to requests that the data team centralize the definition in the dbt repo.
+
+| event_date | order_id | order_country | order_status | customer_id | customer_status | amount |
+| --- | --- | --- | --- | --- | --- | --- |
+| 2022-10-01 | 1 | United States | completed | 19 | Healthy | 10 |
+| 2022-10-01 | 2 | France | completed | 36 | Churn Risk | 15 |
+| 2022-10-02 | 2 | France | returned | 36 | Churned | 15 |
+| 2022-10-02 | 3 | Turkey | completed | 20 | Healthy | 80 |
+| 2022-10-03 | 4 | Korea | completed | 14 | Churn Risk | 24 |
+
+### Logic in the modeling layer vs the semantic layer
+
+We know from our upstream dbt models that the `amount` field represents the revenue from each order.
+
+The solution is to use the flexibility of the dbt modeling layer to add a boolean field called `is_active_row` that shows whether or not the row in question is the most recent version. With this, we can understand and filter out duplicate rows that may be connected to the same order.
+
+Once we have this field, we reach a diverging path:
+
+- If we are not interested in seeing the history of `order_events`, we can add a `where` clause **to the model itself**. This would ensure there is only one row per order.
+- If we **are** interested in seeing the history of `order_events`, we can add a `filter` to the metric definition to ensure that these duplicate order rows don’t cause us to misreport revenue.
+
+Both of these paths ensure that only the correct orders are included in the metric calculation, but one does it at the modeling layer and the other at the semantic layer. There is no **best** path here - it depends on your organization's reporting and analytics needs. For this example, we’ll say that our business isn’t interested in understanding orders that have gone from completed to returned, so we’ll use option one moving forward. Now let's define the metric:
+
+```yaml
+version: 2
+metrics:
+  - name: total_revenue
+    label: The revenue of our business
+    model: ref('order_events')
+    description: "The revenue for our business, as defined by Jerry in Finance"
+
+    calculation_method: sum
+    expression: amount
+
+    timestamp: event_date
+    time_grains: [day, week, month, all_time]
+
+    dimensions:
+      - customer_status
+      - order_country
+
+    ## We don't need this section because we chose option 1
+    ## filters:
+    ##   - field: order_status
+    ##     operator: '='
+    ##     value: 'completed'
+```
+
+Each of the properties of the above definition is defined [in the metrics documentation](https://docs.getdbt.com/docs/building-a-dbt-project/metrics), but let’s dig into the two that might require some additional explanation. The two in question are `expression` and `dimensions`.
+
+In plain English, the `expression` property is the SQL column (or expression) that we are applying the calculation method on. In our example above, this simplifies to `sum(amount)`. However, this doesn’t **need** to be a field in the model. It could also be a SQL expression like `case when condition = true then field else 0 end`.
+
+And then there’s `dimensions`.
+
+### Choosing which dimensions to use with your metric
+
+The `dimensions` attribute is a bit more nuanced than the others because it involves curating the ways through which a user can interact with the metric. To that end …
+
+❗ **We recommend curating dimensions, not including all columns within the model. Most models contain dimensions that aren’t relevant for end-user analysis.**
+
+What do we mean? Well, there is a lot of nuance in what constitutes a useful or less useful dimension that is dependent on the shape of the underlying data and the ways with which the metric will be consumed. Continuing with our revenue use case, here are some examples:
+
+- **Useful Dimensions:**
+    - `customer_status`: This field is helpful to end users because it allows them to break down the revenue generated by each customer status grouping. Members of the retention team might be interested in understanding the long-term trends of revenue from the Churn Risk group so that they can better understand the impact that their retention initiatives have had.
+    - `order_country`: This field is useful because it allows members of the finance team to break down the accepted revenue from each country of origin so that they can better understand which countries are experiencing the highest growth.
+- **Less Useful Dimensions:**
+    - `order_status`: Given that order_status is a part of the metric definition, it doesn’t make sense to include it in the acceptable dimensions list because the value returned would always be `completed`.
+    - `order_id`: Each order id corresponds to a single order and a single point in time. Grouping the metric of revenue by order_id would just return the base grain of the table and the same value as the amount field - not useful from a metric perspective!
+- **Nuanced Dimensions:**
+    - `customer_id`: This is an interesting field because it can be both good and bad depending on the context in which it is used and the underlying data. In our example use case, this dimension wouldn’t be that useful - it would contain too many unique values and tracking the individual revenue impact of a single customer doesn’t make sense on a retail scale.
+        - In a SaaS business though, it might make more sense - especially with usage-based pricing. The Customer Success team might be interested in tracking the revenue of certain customers and ensuring that they remain consistent.
+
+To quote Cameron Afzal, Product Manager of the dbt Semantic Layer:
+
+> Thoughtful curation of dimensions is essential for three main reasons:
+> - **Relevance:** Analysts must include the dimensions most relevant to answering the question.
+> - **Trust:** Curating high-quality dimensions with little to no known errors helps ensure trust in analysis results and the decisions that follow.
+> - **Efficiency:** Curation provides a faster path to high-quality analysis results.
+
+To put it another way, **metrics are most useful when every dimension provided can help provide answers to the business.**
+
+## Advanced example: NPS
+
+### Designing a complex metric
+
+Now let’s look at a more complex example of a metric - one that is built from components that could theoretically themselves be metrics. The metric in question is Net Promoter Score, which is used by the dbt Labs internal analytics team to understand the experience that users are having on dbt Cloud.
+
+For those of you who are unfamiliar with the industry metric of Net Promoter Score, here is a [great article from the folks over at Delighted on how it is calculated](https://delighted.com/net-promoter-score). The short version of it is `the percentage of promoters - the percentage of detractors`.
+
+---
+
+Here at dbt Labs, we provide users with short surveys where they can share feedback (in dbt Cloud as well as in a few other locations). The data collected from those surveys is used to calculate our NPS Score, which helps us understand user sentiment over time.
+
+Given that these surveys come from a few different sources, there is a large amount of upstream modeling performed in order to unify them into a single model, but the end result is something that looks like the table below:
+
+| feedback_date | unique_id | feedback_source | user_type | account_plan | score | nps_category |
+| --- | --- | --- | --- | --- | --- | --- |
+| 2022-10-01 | 1 | nps_tool_1 | developer | team | 5 | detractor |
+| 2022-10-01 | 2 | nps_tool_2 | read_only | developer | 9 | promoter |
+| 2022-10-02 | 3 | nps_tool_1 | developer | enterprise | 10 | promoter |
+| 2022-10-02 | 4 | nps_tool_1 | developer | developer | 7 | passive |
+| 2022-10-02 | 5 | nps_tool_2 | developer | team | 9 | promoter |
+| 2022-10-03 | 6 | nps_tool_1 | developer | enterprise | 7 | passive |
+
+The dbt Internal Analytics team ([long may they reign](https://www.linkedin.com/feed/update/urn:li:activity:6962884130569080833/)) took this data and decided to build the NPS Score metric into our repo so that it could be surfaced to stakeholders in multiple tools. This process is where we began to form our opinions on what should live in the modeling layer vs the semantic layer - but these are sure to progress as we add in more and more real-world use cases.
+
+### Option 1: Putting everything in the semantic layer
+
+If we wanted to store all the logic inside metric definitions, we could use the following code to create six different metrics that build up to the NPS Score metric. This would allow end users to retrieve the NPS Score they are interested in, in a version-controlled, standard way, across any of their BI tools of choice. Additionally, it allows users to individually slice/dice any of the component metrics by themselves.
+
+```yaml
+metrics:
+  - name: total_respondents
+    label: Total of NPS Respondents
+    model: ref('customer_nps')
+    description: 'The count of users responding to NPS surveys in dbt Cloud.'
+    calculation_method: count
+    expression: unique_id
+    timestamp: created_at
+    time_grains: [day, month, quarter, year]
+    dimensions:
+      - feedback_source
+      - account_plan
+      - user_type
+
+  - name: total_promoter_respondents
+    ......... ##same as total_respondents
+    filters:
+      - field: nps_category
+        operator: '='
+        value: "'promoter'"
+
+  - name: total_detractor_respondents
+    ......... ##same as total_respondents
+    filters:
+      - field: nps_category
+        operator: '='
+        value: "'detractor'"
+
+  - name: promoters_pct
+    label: Percent Promoters (Cloud)
+    description: 'The percent of dbt Cloud users in the promoters segment.'
+    calculation_method: expression
+    expression: "{{metric('total_promoter_respondents')}} / {{metric('total_respondents')}}"
+    timestamp: created_at
+    time_grains: [day, month, quarter, year]
+    dimensions:
+      - feedback_source
+      - account_plan
+      - user_type
+
+  - name: detractors_pct
+    ... ##same as promoters_pct
+    expression: "{{metric('total_detractor_respondents')}} / {{metric('total_respondents')}}"
+
+  - name: nps_score
+    label: Net Promoter Score
+    description: 'The NPS (-1 to 1) of all dbt Cloud users.'
+    calculation_method: expression
+    expression: "{{metric('promoters_pct')}} - {{metric('detractors_pct')}}"
+    timestamp: created_at
+    time_grains: [day, month, quarter, year]
+    dimensions:
+      - feedback_source
+      - account_plan
+      - user_type
+
+```
+
+### Option 2: Keeping logic in the modeling layer
+
+But what if we didn’t want to encode all that information in the metric definitions? What if we didn’t need the ability to dig into the component metrics and only wanted to look at the final score? In that case, we could encode most of the logic into the model itself and define the metric on top of that!
+
+Thinking through this, we know that our NPS Score is a series of ratios that depend on which category people fall into, with the end result being a number between -100 and 100. That number is usually then *displayed* in a percentage format but it is *calculated* as a number.
+
+So in order to reduce the complexity of the metric code, we can add a new field into the model that assigns an `nps_value` to each survey received. The logic for this field would assign a value of 100, 0, or -100 depending on the survey’s `nps_category`. Example code below:
+
+```sql
+case
+    when nps_category = 'detractor' then -100
+    when nps_category = 'promoter' then 100
+    else 0
+end as nps_value
+```
+
+The end result of adding this code to the model would look something like this:
+
+| feedback_date | unique_id | feedback_source | user_type | account_plan | score | nps_category | nps_value |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| 2022-10-01 | 1 | nps_tool_1 | developer | team | 5 | detractor | -100 |
+| 2022-10-01 | 2 | nps_tool_2 | read_only | developer | 9 | promoter | 100 |
+| 2022-10-02 | 3 | nps_tool_1 | developer | enterprise | 10 | promoter | 100 |
+| 2022-10-02 | 4 | nps_tool_1 | developer | developer | 7 | passive | 0 |
+| 2022-10-02 | 5 | nps_tool_2 | developer | team | 9 | promoter | 100 |
+| 2022-10-03 | 6 | nps_tool_1 | developer | enterprise | 7 | passive | 0 |
+
+Now that each survey has an associated `nps_value`, we can forgo the ratio calculations used in Option 1 and create our NPS Score metric as a single average metric.
+
+```yaml
+metrics:
+  - name: nps_score
+    label: NPS Score
+    model: ref('customer_nps')
+    calculation_method: average
+    expression: nps_value
+    timestamp: created_at
+    time_grains: [day, month, quarter, year]
+    dimensions:
+      - feedback_source
+      - account_plan
+      - user_type
+```
+
+**Why does this work?**
+
+This is a slightly different way of calculating NPS from the usually provided formula, but it ends up with the same result. Here is why:
+
+- `promoters_pct` was defined as `total_promoter_respondents` / `total_respondents`
+    - In our example dataset, this nets out to 3 / 6 = 50%.
+    - If we instead assign a value of 100 and take the average, it becomes 300 / 6 = 50.
+- `detractors_pct` was defined as `total_detractor_respondents` / `total_respondents`
+    - In our example dataset, this nets out to 1 / 6 = 16.67%.
+    - If we instead assign a value of -100 and take the average, it becomes -100 / 6 = -16.67.
+- Therefore, our `nps_score` follows suit:
+    - In our example dataset, 50% - 16.67% = 33.33%
+    - If we instead assign each category its value and take the average, it becomes 200 / 6 = 33.33
+
+The underlying principle of why this works is based on the fact that averages divide the sum of the values in the set by their number. In more dbt-friendly terms, what it really means is that average is creating the following equation: `sum(value)/count(*)`. In the first example implementation, we were doing roughly the same thing with multiple metric definitions - the only difference was that our numerator was a count that assigned each row a value of 1. So if we duplicate that logic and give each row a value of 1, then we can create far fewer metrics.
+
+But that only gets us to the `promoters_pct` and `detractors_pct` metrics. In order to combine these both into a single metric definition, we needed to change the value that we assign. Given that the total range of values that the metric could output is -100 (all detractors) to 100 (all promoters), we can assign each of those categories that peak value, along with 0 for passives. This means that when the numerator is aggregated, it nets out promoters against detractors just like the documented equation `promoter score - detractor score` does.
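+
+If you want to convince yourself of this equivalence against your own data, a quick sanity check works well. Here is a minimal sketch (ours, not part of the original example) that assumes the `customer_nps` model and the `nps_value` field described above; both columns should return the same number (33.33 for the sample data):
+
+```sql
+select
+    -- Option 1 logic: percent promoters minus percent detractors
+    100.0 * sum(case when nps_category = 'promoter' then 1 else 0 end) / count(*)
+  - 100.0 * sum(case when nps_category = 'detractor' then 1 else 0 end) / count(*)
+      as nps_via_ratios,
+
+    -- Option 2 logic: a single average over the pre-assigned nps_value
+    avg(nps_value) as nps_via_average
+from {{ ref('customer_nps') }}
+```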
+
+**Is this what I should do?**
+
+[It depends!](https://twitter.com/SBinLondon/status/1413113782214266885) There will be times when it might be better to have logic stored in the modeling layer and there will be times when it might be better to have logic stored in the semantic layer. Our shorthand is to only include logic in the semantic layer if it is needed by our stakeholders - if they don't need to analyze the components, we keep them in the modeling layer. In the end, the needs of your business stakeholders should drive your decision on where to keep this logic.
+
+## How to structure your metrics
+
+Now that we’ve designed our metrics, let's move on to structuring them within our project. We'll examine the different ways to organize metrics and take a look at the pros and cons of several strategies.
+
+### Folder structure
+
+If you follow [dbt’s best practices for structuring your project](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview), you will have a folder structure that looks similar to this:
+
+```yaml
+models:
+  staging:
+  intermediate:
+  marts:
+```
+
+Your marts folder would most likely contain your end-state models ready for business consumption. Given that metrics are meant for business consumption, we are presented with two options - staying within the same framework or representing metrics as their own level.
+
+We recommend Option A (metrics within marts) but recognize that some people might prefer Option B (metrics within models).
+
+**A. Metrics within marts**
+
+Create a metrics folder within marts and use this to contain all of your metric definitions.
+
+```yaml
+models:
+  staging:
+  intermediate:
+  marts:
+    metrics:
+```
+
+**B. Metrics within models**
+
+Create a metrics folder within models and use this to contain all of your metric definitions.
+
+```yaml
+models:
+  staging:
+  intermediate:
+  marts:
+  metrics:
+```
+
+### File structure
+
+Once you’ve decided ***where*** to put your metrics folder, you can now decide ***how*** you want to structure your metrics within this folder. Choose one of two methods for structuring metrics:
+
+**Option A: The all-in-one YML method**
+This method follows a similar pattern to [dbt’s best practices around model structure](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview). The introduction of the metrics folder is the only change from the standard best practice.
+
+In practice, the all-in-one YML method would look like the following:
+
+```yaml
+## Metrics within Marts
+models:
+  marts:
+    metrics:
+      - metrics.yml
+------
+## Metrics within Models
+models:
+  metrics:
+    - metrics.yml
+```
+
+**Option B: The single-metric-per-file method**
+In this method, you create *one* yml file for *each* metric. Although this is an opinionated stance that differs from [dbt’s best practices](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview), here are some reasons why this **could** be useful:
+
+- Individual files are more easily discovered by new analytics engineers as your organization expands
+- Individual files can more easily define specific code owners that may not be part of the data team.
+
+For example, Jerry from the Finance department is the code owner for the `revenue` metric definition and oversees it for the business. So, any change to this specific file would need Jerry’s sign-off.
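+
+On GitHub, one lightweight way to enforce that sign-off is a CODEOWNERS entry pointing at the metric's file. Here is a hypothetical sketch; the path and handle are placeholders, not a real config:
+
+```
+# .github/CODEOWNERS
+/models/marts/metrics/revenue.yml @jerry-from-finance
+```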
+
+This can be tricky for code owners who aren’t familiar with your git flow, but it brings them into the chain of responsibility for the metric definition. It also helps them take ownership of reporting on this metric and creates a responsible party when definitions need to change.
+
+The single-metric-per-file method would look like this:
+
+```yaml
+models:
+  metrics:
+    marts:
+      - revenue.yml
+      - average_order_value.yml
+      - some_other_metric_name.yml
+```
+
+### Folder and file structure is a preference, not a hard rule
+
+In the end, all of the structuring information above is just a recommendation. Your project probably has a defined convention in how nodes are organized, whether or not it follows dbt best practices, and you should continue to follow your own organizational practices. That said, we do recommend that metrics should be separate from model yml files. The reason?
+
+**Metrics are important business objects unto themselves and should live separate from the model definitions.**
+
+## A call to action
+
+This is just the beginning of dbt metrics and the Semantic Layer. We have a number of exciting ideas for expanding capabilities that we plan to begin work on in the coming months. However, we can’t do that without you.
+
+The Semantic Layer is a fundamental change to what it means to interact with dbt, and ultimately most of the best practices will come from the dbt Community - folks like you. It does not matter if you consider yourself an "expert" on this - we want to talk to you and hear how you are using or would like to use metrics and the semantic layer. Y’all are going to be our guiding light to help us make sure that all the functionality we add helps **you** serve the needs of your business.
+
+If your experiences with the Semantic Layer match what we’ve written in this post, and especially if they don’t, please share [comments and feedback in this Discourse Discussion](https://discourse.getdbt.com/t/how-to-design-and-structure-metrics/5040)!
+
+Additionally, I would invite you to join us over at #dbt-core-metrics on the dbt Slack, where we’ll be posting updates, answering questions, discussing usage, and hopefully responding with the best emojis.
diff --git a/website/blog/2022-10-19-polyglot-dbt-python-dataframes-and-sql.md b/website/blog/2022-10-19-polyglot-dbt-python-dataframes-and-sql.md
new file mode 100644
index 00000000000..5a18890e906
--- /dev/null
+++ b/website/blog/2022-10-19-polyglot-dbt-python-dataframes-and-sql.md
@@ -0,0 +1,347 @@
+---
+title: "Stronger together: Python, dataframes, and SQL"
+description: "Going polyglot is a major next step in the journey of dbt Core. It expands possibilities and also the potential for confusion right along with it. SQL, dataframes, and Python are stronger together and polyglot dbt allows informed practitioners to choose when to use each."
+slug: polyglot-dbt-python-dataframes-sql
+
+authors: [doug_beatty]
+
+tags: [dbt product updates]
+hide_table_of_contents: false
+
+date: 2022-10-18
+is_featured: true
+---
+
+# Stronger together: Python, dataframes, and SQL
+
+For years working in data and analytics engineering roles, I treasured the daily camaraderie of sharing a small office space with talented folks using a range of tools - from analysts using SQL and Excel to data scientists working in Python. I always sensed that there was so much we could work on in collaboration with each other - but siloed data and tooling made this much more difficult. The diversity of our tools and languages made the potential for collaboration all the more interesting, since we could have folks with different areas of expertise each bringing their unique spin to the project. But logistically, it just couldn’t be done in a scalable way.
+
+So I couldn’t be more excited about dbt’s polyglot capabilities arriving in dbt Core 1.3. This release brings the Python dataframe libraries that are crucial to data scientists into dbt and enables general-purpose Python, while still using a shared database for reading and writing data sets. Analytics engineers and data scientists are stronger together, and I can’t wait to work side-by-side in the same repo with all my data scientist friends.
+
+Going polyglot is a major next step in the journey of dbt Core. While it expands possibilities, we also recognize the potential for confusion. When combined in an intentional manner, SQL, dataframes, and Python are also stronger together. Polyglot dbt allows informed practitioners to choose the language that best fits each use case.
+
+In this post, we’ll give you hands-on experience and seed your imagination with potential applications. We’ll walk you through a [demo](https://github.com/dbt-labs/demo-python-blog) that showcases string parsing - one simple way that Python can be folded into a dbt project.
+
+We’ll also give you the intellectual resources to compare/contrast:
+- different dataframe implementations within different data platforms
+- dataframes vs. SQL
+
+Finally, we’ll share “gotchas” and best practices we’ve learned so far and invite you to participate in discovering the answers to outstanding questions we are still curious about ourselves.
+
+Based on our early experiences, we recommend that you:
+
+✅ **Do**: Use Python when it is better suited for the job – model training, using predictive models, matrix operations, exploratory data analysis (EDA), Python packages that can assist with complex transformations, and select other cases where Python is a more natural fit for the problem you are trying to solve.
+
+❌ **Don’t**: Use Python where the solution in SQL is just as direct. Although a pure Python dbt project is possible, we’d expect the most impactful projects to be a mixture of SQL and Python.
+
+
+
+## Polyglot dbt: An alloy of Python, dataframes, and SQL
+
+dbt Core 1.3 [gives you the ability](https://www.getdbt.com/blog/introducing-support-for-python/) to use Python models to materialize dataframes as tables within your dbt DAG. In combination, Python and SQL models form an alloy within a dbt project, yielding net new properties not found in isolation.
+
+This is a simultaneous two-part unlock. One, we get a general-purpose programming language in Python. Two, we get access to special-purpose dataframe libraries for reading from / writing to the database.
+
+Some pretty cool functionality follows:
+1. **Vast ecosystem of Python packages**: The rich Python ecosystem of packages is the heart of varied use cases like machine learning, exploratory data analysis (EDA), data set generation, and many, many more.
+1. **Dataframe syntax for data set manipulation**: There’s a vibrant community of Python-first developers that can more easily contribute to dbt projects when they can use dataframe syntax. This is especially useful in conjunction with data science use-cases.
+1. **Python workflows where your data lives**: Most Python work being done today is done outside of the data platform that stores the source data, meaning developers have to transfer the data into the Python environment first, which adds significant friction to the development workflow. In the case of Snowpark DataFrames, data movement is reduced by pushing the computation down to the data platform.
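+
+If you haven't seen one yet, the shape of a dbt Python model is minimal. Here's a sketch (the model name is illustrative):
+
+```python
+def model(dbt, session):
+    # dbt.ref() returns a dataframe (in your platform's native flavor)
+    # for an upstream model, just like ref() in a SQL model
+    upstream_df = dbt.ref("my_upstream_model")
+
+    # ...transform with dataframe methods or any other Python logic...
+
+    # the returned dataframe is materialized as a table in your warehouse
+    return upstream_df
+```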
+
+## Trade-offs between SQL and dataframe syntax
+
+Once you’re set up - the next question is: what _should_ you use Python for? How should you think about trade-offs between SQL and dataframes? I haven’t personally pondered this deeply… but the folks at [Ponder](https://ponder.io/) have 😉 They published a series of posts comparing the two in terms of:
+
+- [convenience](https://ponder.io/pandas-vs-sql-part-4-pandas-is-more-convenient/) - dataframes go well with data science libraries like [Scikit-learn](https://scikit-learn.org/stable/)
+- [conciseness](https://ponder.io/pandas-vs-sql-part-2-pandas-is-more-concise/) - dataframes have concise syntax for normalization, one-hot encoding, rolling averages, and other uses
+- [flexibility](https://ponder.io/pandas-vs-sql-part-3-pandas-is-more-flexible/) - rows and columns in dataframes can be transposed and functions can be applied along columns or rows
+
+Gaining **your own** sense of these differences will empower you to create your own alloy mix of polyglot dbt models.
+
+## Comparing dataframe libraries
+
+Before we get into our hands-on example, let’s take a look at the nuts and bolts of getting your project working with different dataframe types. Multiple data platforms and dataframe libraries are supported in dbt Core as of version 1.3, but not uniformly (see the compatibility table below). See [here](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/python-models#specific-data-platforms) for platform-specific setup instructions.
+
+| **Type of dataframe**      | **Snowflake** | **Databricks** | **BigQuery** |
+|----------------------------|---------------|----------------|--------------|
+| Snowpark DataFrames        | ✅            | ❌             | ❌           |
+| pandas DataFrames          | ✅            | ✅             | ✅           |
+| Spark DataFrames           | ❌            | ✅             | ✅           |
+| pandas-on-Spark DataFrames | ❌            | ✅             | ✅           |
+| Koalas DataFrames          | ❌            | ✅             | ✅           |
+
+**More detailed comparisons and trade-offs**
+
+Snowpark DataFrames are only supported in Snowflake, while Spark DataFrames are only available on Databricks and BigQuery. It’s also worth keeping in mind that different types of dataframes use [different syntax](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/python-models#dataframe-api-and-syntax).
+
+We’ve intentionally not included Jinja within Python models: a primary use of Jinja is for control flow and accessing dynamic-esque variables, both of which you can just do within Python! The other main use for Jinja within dbt is for creating abstractions across differing database syntaxes. At this time, there’s no unified syntax abstraction across the different types of dataframes. (But someone will probably [make one](https://xkcd.com/927/)!)
+
+Although pandas DataFrames may seem like the ideal solution due to their support across data platforms, they come with their own trade-offs. For instance, they can’t take advantage of the efficiency of native types like Spark and Snowpark DataFrames. They’re also limited by memory – large data sets will quickly exhaust available memory. In addition to that, they are constrained by being single-threaded, so they cannot take advantage of multiple cores. Although pandas can be parallelized via solutions like Dask, Modin, etc., both Snowpark and Spark DataFrames will handle these scaling challenges natively. So use Snowpark, pandas-on-Spark DataFrames, and Spark DataFrames whenever you can! (Note: pandas-on-Spark DataFrames were formerly known as Koalas DataFrames and are now commonly called pandas API DataFrames.)
+
+In Snowflake, any Snowpark DataFrame transformations specified in Python are actually compiled to SQL before execution.
+
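+To make the syntax trade-off concrete, here's a tiny, self-contained illustration (ours, not from the Ponder posts) of the same aggregation written with dataframe methods and in SQL:
+
+```python
+import pandas as pd
+
+orders = pd.DataFrame(
+    {"customer": ["a", "a", "b"], "amount": [10, 15, 80]}
+)
+
+# dataframe style: method chaining on the dataframe object
+totals = orders.groupby("customer", as_index=False)["amount"].sum()
+
+# the equivalent SQL would be:
+#   select customer, sum(amount) as amount
+#   from orders
+#   group by customer
+print(totals)
+```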
+
+## First hands-on experience using a Python package
+
+Now that we have sufficient background covered, let’s dive into a real-world use-case.
+
+The full source code for this demo is available at:
+- https://github.com/dbt-labs/python-string-parsing
+
+This example is using [DuckDB](https://duckdb.org/) as the database backend. You can use this same code by copying it into your project that is using your database adapter of choice.
+
+Our example will give you hands-on experience with three things:
+1. read data from a table into a dataframe
+1. parse uniform datetimes out of a variety of string formats using the `dateutil` library
+1. write the result back into a table
+
+Date/time parsing is a common use-case because dates and times are ubiquitous in transactional source data. Often, the source format is unknown, and it may even be a mixture of multiple formats. The `dateutil` package has a method that will do a best-effort attempt at parsing a string into a Python datetime object, and it will raise an exception when it is unable to parse the input. (Users of pandas may already be familiar with the `pandas.to_datetime` method, which uses `dateutil`.) The following demo uses `dateutil` to parse source data with unknown date/time formats.
+
+In this example we’ll:
+1. Install the requirements in a virtual environment
+1. Build the dbt project
+1. Examine the output
+
+### Components of the dbt project
+
+This dbt project has only two main pieces (besides our seed data to mimic source data):
+1. Transformation logic within the dbt Python model
+1. Configuration of data testing for quality assurance
+
+#### The dbt Python model
+
+```python
+import dateutil.parser
+
+
+def try_dateutil_parse(x):
+    try:
+        return dateutil.parser.parse(x)
+    except (ValueError, OverflowError):
+        # unparseable input yields None instead of raising
+        return None
+
+
+def model(dbt, session):
+    df = dbt.ref("source_data")
+
+    df['parsed_transaction_time'] = df['transaction_time'].apply(try_dateutil_parse)
+
+    return df
+```
+
+This model tries to parse the raw string value into a Python datetime. When not successful, it yields a `None` value rather than raising an error. The `dateutil` parser can handle a wider range of formats than most data platforms’ native functionality.
+
+#### Testing the result
+
+During the build process, dbt will check if any of the values are null. This uses the built-in [`not_null`](https://docs.getdbt.com/docs/building-a-dbt-project/tests#generic-tests) test, which will generate and execute SQL in the data platform.
+
+Our initial recommendation for testing Python models is to use [generic](https://docs.getdbt.com/docs/building-a-dbt-project/tests#generic-tests) and [singular](https://docs.getdbt.com/docs/building-a-dbt-project/tests#singular-tests) tests.
+
+```yaml
+version: 2
+
+models:
+  - name: transactions
+    columns:
+      - name: parsed_transaction_time
+        tests:
+          - not_null
+```
+
+### Download the repository and install requirements
+
+The full source code for this demo is available at:
+- https://github.com/dbt-labs/python-string-parsing
+
+The only prerequisites for this demo are `python3` and `git`. You can verify both are installed and you’re on the right version via these commands in your terminal:
+```shell
+python3 --version
+git --version
+```
+
+Assuming both are available on your system, you can clone the example repository using your method of choice:
+
+**HTTPS**
+
+```shell
+git clone https://github.com/dbt-labs/demo-python-blog.git
+cd demo-python-blog
+```
+
+ +
+**SSH**
+
+```shell
+git clone git@github.com:dbt-labs/demo-python-blog.git
+cd demo-python-blog
+```
+
+ +
+**GitHub CLI**
+
+```shell
+gh repo clone dbt-labs/demo-python-blog
+cd demo-python-blog
+```
+
+ +Then you'll create a virtual environment and install all the dependencies. Choose your shell / operating system from the list below and run the commands (defaulting to `zsh`/`bash`): + +
+**POSIX bash/zsh**
+
+```shell
+python3 -m venv env
+source env/bin/activate
+python3 -m pip install --upgrade pip
+python3 -m pip install -r requirements.txt
+source env/bin/activate
+```
+
+ +
+**POSIX fish**
+
+```shell
+python3 -m venv env
+source env/bin/activate.fish
+python3 -m pip install --upgrade pip
+python3 -m pip install -r requirements.txt
+source env/bin/activate.fish
+```
+ +
+**POSIX csh/tcsh**
+
+```shell
+python3 -m venv env
+source env/bin/activate.csh
+python3 -m pip install --upgrade pip
+python3 -m pip install -r requirements.txt
+source env/bin/activate.csh
+```
+ +
+**POSIX PowerShell Core**
+
+```shell
+python3 -m venv env
+env/bin/Activate.ps1
+python3 -m pip install --upgrade pip
+python3 -m pip install -r requirements.txt
+env/bin/Activate.ps1
+```
+ +
+**Windows cmd.exe**
+
+```shell
+python -m venv env
+env\Scripts\activate.bat
+python -m pip install --upgrade pip
+python -m pip install -r requirements.txt
+env\Scripts\activate.bat
+```
+ +
+**Windows PowerShell**
+
+```shell
+python -m venv env
+env\Scripts\Activate.ps1
+python -m pip install --upgrade pip
+python -m pip install -r requirements.txt
+env\Scripts\Activate.ps1
+```
+
+
+### Build it
+Once the dependencies are all installed, we can build the project:
+
+```shell
+dbt build
+```
+
+### Query the result of the dbt transformation
+
+Congrats on successfully running your first dbt Python model! Let’s confirm the output visually by running the following query:
+
+```shell
+duckcli demo.duckdb --table --execute "select id, transaction_time, parsed_transaction_time from parse_datetimes order by id"
+```
+
+As you can see, each of the various input formats was successfully parsed into a uniform and standardized format.
+
+| id | transaction_time                 | parsed_transaction_time |
+|----|----------------------------------|-------------------------|
+| 1  | Fri, 16 Dec 2022 02:59:36 +0000  | 2022-12-16 02:59:36     |
+| 2  | Sun, 25 Dec 22 02:59:36 +0000    | 2022-12-25 02:59:36     |
+| 3  | Thursday, 31-Mar-22 02:59:36 UTC | 2022-03-31 02:59:36     |
+| 4  | Thu, 02 Jun 22 02:59:36 +0000    | 2022-06-02 02:59:36     |
+| 5  | Thu, 17 Feb 2022 02:59:36 +0000  | 2022-02-17 02:59:36     |
+| 6  | 2022-03-28 02:59:36+00:00        | 2022-03-28 02:59:36     |
+| 7  | 2022-10-22 02:59:36+00:00        | 2022-10-22 02:59:36     |
+| 8  | 2022-10-02 02:59:36+00:00        | 2022-10-02 02:59:36     |
+| 9  | Monday, 03-Jan-2022 02:59:36 UTC | 2022-01-03 02:59:36     |
+| 10 | Thu, 25 Aug 2022 02:59:36 +0000  | 2022-08-25 02:59:36     |
+| 11 | 2022-05-29T02:59:36+00:00        | 2022-05-29 02:59:36     |
+| 12 | 2022-08-02 02:59:36+00:00        | 2022-08-02 02:59:36     |
+| 13 | 2022-10-18 02:59:36+00:00        | 2022-10-18 02:59:36     |
+
+⚠️ Remember to deactivate the environment as a final step:
+```
+deactivate
+```
+
+And there you go! A real live Python example for a common analytics use case! Now let’s think about what we learned.
+
+## Best practice recommendations
+
+Based on our early experiences using dbt Python models and our ongoing conversations with members of the dbt Community, here are some recommended “do’s and don’ts” we think will help set you up for success.
+
+### ✅ **Do**:
+
+- Use Python when it is better suited for the job – model training, using predictive models, matrix operations, exploratory data analysis (EDA), and Python packages that can assist with complex transformations.
+- Use the native dataframe type and syntax for your data platform. Use a notebook environment (and a small sample of your data set) for initial development before copying it into dbt.
+
+### ❌ **Don’t**:
+
+- Use Python where the solution in SQL is just as direct. Although a pure Python dbt project is possible, we’d expect the most impactful projects to be a mixture of SQL and Python.
+- Perform web scraping or download data from the web.
+- Use pandas unless absolutely necessary. Although pandas can be useful in the prototyping stage, scaling to larger data sets often requires a platform-native type like Snowpark, Spark, or pandas-on-Spark.
+- Translate your entire project to be Python-only. Although possible, if models are already written in SQL and there’s not a specific benefit to it being in Python, just leave it as SQL.
+
+### 🤷 We don’t know yet!
+There are several outstanding questions where you are invited to contribute to the discussion:
+- [Reusable Python logic](https://github.com/dbt-labs/dbt-core/discussions/5741)
+- [Debugging output](https://github.com/dbt-labs/dbt-core/discussions/5799)
+- [Testing Python models](https://github.com/dbt-labs/docs.getdbt.com/discussions/1811)
+- [Abstractions over different dataframe APIs](https://github.com/dbt-labs/dbt-core/discussions/5738)
+
+## Conclusion
+
+Python is particularly well suited for many use-cases within a dbt project, including:
+- **Data science** model training or in-line deployment
+- **Utilizing packages** within the Python ecosystem like [ftfy](https://ftfy.readthedocs.io/en/latest/), [dateutil](https://dateutil.readthedocs.io/en/stable/index.html), etc.
+- **Exploratory data analysis** (EDA) using functions and packages such as [`pandas.describe`](https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.describe.html), [Pandas profiling](https://pandas-profiling.ydata.ai/docs/master/index.html), [Great Expectations](https://github.com/great-expectations/great_expectations), etc.
+- Generating **synthetic data sets** using [Faker](https://faker.readthedocs.io/en/master/)
+- **Linear programming** libraries like [CVXOPT](https://cvxopt.org/), [PULP](https://github.com/coin-or/pulp), [CVXPY](https://www.cvxpy.org/), [ECOS](https://github.com/embotech/ecos-python), [Google OR-Tools](https://github.com/google/or-tools), [SciPy](https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.linprog.html), etc.
+- **Simulation** using [SimPy](https://simpy.readthedocs.io/en/latest/examples/index.html), [Monte Carlo simulation](https://github.com/matsonj/nba-monte-carlo), what-if analysis
+- More yet to come!
+
+There is no one programming language to rule them all, so there’s no programming language hill that we are going to die on! We’ll make sure that dbt supports a wide variety of languages and let you make well-reasoned decisions for each individual use case.
+
+We are excited to hear what you discover and build! Please share with us in the [#dbt-core-python-models](https://getdbt.slack.com/archives/C03QUA7DWCW) or [#i-made-this](https://getdbt.slack.com/archives/C01NH3F2E05) channels in Slack.
diff --git a/website/blog/2022-10-24-demystifying-event-streams.md b/website/blog/2022-10-24-demystifying-event-streams.md
new file mode 100644
index 00000000000..39829c3bca0
--- /dev/null
+++ b/website/blog/2022-10-24-demystifying-event-streams.md
@@ -0,0 +1,277 @@
+---
+title: "Demystifying event streams: Transforming events into tables with dbt"
+description: "Pulling data directly out of application databases is commonplace in the MDS, but also risky. Apps change quickly, and application teams might update database schemas in unexpected ways, leading to pipeline failures, data quality issues, and data delivery slow-downs. There is a better way. In this blog post, Charlie Summers (Merit) describes how their organization transforms application event streams into analytics-ready tables that are more resilient to event schema changes."
+slug: demystifying-event-streams
+
+authors: [charlie_summers]
+
+tags: [analytics craft]
+hide_table_of_contents: false
+
+date: 2022-11-04
+is_featured: true
+---
+
+Let’s discuss how to convert events from an event-driven microservice architecture into relational tables in a warehouse like Snowflake. Here are a few things we’ll address:
+
+- Why you may want to use an architecture like this
+- How to structure your event messages
+- How to use dbt macros to make it easy to ingest new event streams
+
+
+
+## Event Streams at Merit
+
+At Merit, we’re building the leading verified identity platform. One key focus of our platform is data quality. Quality problems can leave first responders unable to check into disaster sites or parents unable to access ESA funds. In this blog post we’ll dive into how we tackled one source of quality issues: directly relying on upstream database schemas.
+
+Under the hood, the Merit platform consists of a series of microservices. Each of these microservices has its own database. We use Snowflake as our data warehouse, where we build dashboards both for internal use and for customers.
+
+![](/img/blog/2022-10-24-demystifying-event-streams/merit-platform.png)
+
+In the past we relied upon an ETL tool (Stitch) to pull data out of microservice databases and into Snowflake. This data would become the main dbt sources used by our report models in BI.
+
+![](/img/blog/2022-10-24-demystifying-event-streams/merit-platform-stitch.png)
+
+This approach worked well, but as engineering velocity increased, we came up with a new policy that required us to rethink this approach: **no service should directly access another microservice’s database**. This rule empowers microservices to change their database schemas however they like without worrying about breaking other systems.
+
+Modern tools like Fivetran and Stitch can flexibly handle schema changes - for example, if a new column is created they can propagate that creation to Snowflake. However, BI tools and dbt models aren’t typically written this way. For example, if a column your BI tool filters on has a name change in the upstream database, that filter will become useless and customers will complain.
+
+The approach we used before required over-communicating about schema changes. Engineers would need to talk to Data before any change, or risk a data outage. Tools that provide column-level lineage can make it easier to detect how schema changes affect dashboards. But a migration is still required whenever a schema change touches a column that’s in use.
+
+This old approach frequently resulted in either busted dashboards or delayed schema changes. These issues were the exact reason engineering implemented the new policy.
+
+The core challenge is contractual: in our old approach the contract between engineering and data was the database schema. But the database schema was intended to be a tool to help the microservice efficiently store and query data, not a contract.
+
+So our solution was to start using an intentional contract: **Events**.
+
+What are Events? Events are facts about what happened within your service. For example, somebody logged in or a new user was created. At Merit (and at many companies), we use an Event-Driven Architecture. That means that microservices primarily communicate information through events, often leveraging messaging platforms like Kafka.
+
+![](/img/blog/2022-10-24-demystifying-event-streams/merit-platform-kafka.png)
+
+Microservices consume messages from others that they’re interested in. We choose to use **thick messages** that store as much information as possible about each event - this means that consuming microservices can store and refer to event data instead of requesting fresh data from other microservices. For distributed systems nerds: this improves Availability at the cost of Consistency.
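+
+For illustration, a thick `OrderCompleted` message might look something like this. This is a hypothetical payload; the field names are ours, not Merit's actual schema:
+
+```json
+{
+  "eventType": "OrderCompleted",
+  "occurredAt": "2022-10-24T12:00:00Z",
+  "order": {
+    "id": "order-123",
+    "customerId": "customer-42",
+    "status": "completed",
+    "amountCents": 1500
+  }
+}
+```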
+
+Event schemas can still change, just like database schemas, but the expectation is that they are already a contract between this microservice and other systems. And the sole intention of events is to be this contract - unlike database schemas, which are also used by microservices internally to store and query data. So, when an event schema changes, there already is a meeting between that team and all teams that consume the event - now Data is just another team at the meeting.
+
+## Events as Contracts
+
+Each event output by a microservice is inserted into a single Kafka topic with a well-defined schema. This schema is managed as part of the [Kafka Schema Registry](https://docs.confluent.io/platform/current/schema-registry/index.html). The Schema Registry doesn’t strictly enforce that events comply with the topic’s schema, but any microservice that produces an event that does not comply with the schema will cause downstream failures - a high-priority bug. These bad events are replayed with the correct schema when the microservice is fixed.
+
+We use [Avro](https://avro.apache.org/) to encode all of our event schemas. We also tried out [Protobuf](https://developers.google.com/protocol-buffers), but found that the Avro tooling was a bit better for Kafka.
+
+Event schema design (what should the data contract be?) is a deep topic that we can only touch on briefly here. At a high level, we must design for change. A schema will almost always be tweaked and tuned over time as your product changes.
+
+As an example, consider a LicenseCreated event. The internal License data model might have several boolean fields in its schema such as IsValid, IsCurrent, IsRestricted, etc. We would recommend instead modeling a License with a single Status field that has a VARCHAR representing the status of the License. Adding a new value to a VARCHAR is easier than adding or removing boolean fields.
+
+One very useful feature of the Kafka Schema Registry is that it can restrict changes that aren’t compatible with old schema versions. For example, if a data type is changed from an INT to a VARCHAR it will throw an error as the new schema is added. This can be an extra line of defense as schemas change. [Read more about this awesome feature here](https://docs.confluent.io/platform/current/schema-registry/avro.html).
+
+## OMG Contract
+
+So we started consuming events from Kafka into Snowflake using [Kafka’s Snowflake Connector](https://docs.snowflake.com/en/user-guide/kafka-connector.html).
+
+![](/img/blog/2022-10-24-demystifying-event-streams/merit-platform-kafka-load.png)
+
+The Snowflake Connector creates a new table for every Kafka topic and adds a new row for every event. In each row there’s a record_metadata column and a record_content column. Each column is a variant type in Snowflake.
+
+![](/img/blog/2022-10-24-demystifying-event-streams/kafka-topic-table.png)
+
+Since we use **thick messages** we actually can consider ourselves done. The messages have as much information as the underlying database, so we could make queries against tables like the above.
+
+However, working with these blobs is much less convenient than a relational table for the following reasons:
+
+1. There may be multiple topics related to the same domain model (ex: Users or Customers). So there may be a CustomerCreated topic, a CustomerDeleted topic, a CustomerUpdated topic, and so on. We need to know how to join between these tables to determine what the latest Customer data is.
+1. We must know whether an event implies a create, an update, or a delete.
+1. We must be aware of the ordering of events - the latest update will include the most up-to-date state unless there’s a delete. This can lead to some gnarly time logic that must be considered across all models.
+    1. One challenge is partial updates - we disallow those currently so that we never need to recreate the state of a domain model across multiple JSON blobs.
+    1. Distributed systems folks will identify another problem: relying on timing. Due to clock skew, we can’t guarantee that event A’s timestamp being earlier than event B’s means that A occurred before B. If both messages are sent on the same Kafka topic then Kafka can ensure ordering (if configured properly), but we don’t want to limit all events to using the same topic. So we choose to ignore this problem since we have relatively low traffic and low machine volume compared to the Googles and Facebooks of the world. We can also verify the likelihood of clock skew affecting our data by looking for events with the same identifying ID happening within the same second - it doesn’t happen often for us.
+
+Instead of repeatedly working through the above challenges, we decided to create a relational layer on top of the raw event streams. This takes the form of [dbt macros](https://docs.getdbt.com/docs/building-a-dbt-project/jinja-macros) that handle all of the above problems.
+
+In order to make the dbt macros easier to write, we requested that engineering add some metadata to all of their events. This formalized the contract between engineering and data - any domain models that don’t comply with the contract will not be able to be used in reports unless the engineering team themselves builds a custom pipeline. We named this the Obvious Model Generation (OMG) Contract since providing the metadata leads to obvious domain model generation. And we liked the acronym.
+
+The OMG contract states that every Kafka message related to a domain model:
+1. Must have its topic name added to a dbt variable associated with that domain model in our dbt_project.yml
+1. Must have a single uniquely identifying field for each object. We provide a default - `id` - and a way to override it in our dbt_project.yml. We currently disallow composite ids, but they wouldn’t be too hard to support in the future.
+1. Must have a field `changeType` set to one of the following values: INSERT, UPDATE, DELETE.
+1. If an INSERT or UPDATE, it must specify a field `data` that encodes the state of the domain model object after the change.
+1. If a DELETE, it must specify a field `deletedID` that is set to the identifying field for the deleted domain model object.
+
+We can now run obvious model generation stream processing on all data that complies with the OMG contract.
+
+![](/img/blog/2022-10-24-demystifying-event-streams/omg-contract.png)
+
+## Generic table pipelines via dbt macros
+
+After solidifying the OMG contract, we built the macros to execute obvious model generation. We wanted to make these as generic as possible while also following good engineering practices. We ended up building three macros that together process event streams into tables. All three macros take in `streams_var` - a list of all the event stream tables related to this domain model. We pull `streams_var` in from dbt_project.yml. We also take in `streams_schema`, which defaults to ‘streams’ but allows overriding for our internal testing.
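+
+Concretely, the variables those macros read might look something like this in `dbt_project.yml`. This is a sketch with illustrative topic names rather than Merit's actual config:
+
+```yaml
+vars:
+  # point 1 of the OMG contract: every event stream table for this domain model
+  container:
+    - container_created
+    - container_updated
+    - container_deleted
+  # point 2: per-stream overrides for streams whose identifying field isn't `id`
+  overriddenIdentityFields:
+    container_created: containerID
+```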
+
+The first macro is called `stream_model_extract_columns_macro`. It iterates through every row in the event stream tables to identify all of the columns that will be part of the domain model table.
+
+```sql
+{%- macro stream_model_extract_columns_macro(streams_var, streams_schema='streams') -%}
+
+SELECT DISTINCT
+    CONCAT('DATA:', KEY, ' ', 'AS', ' ', UPPER(e.KEY)) AS COLUMN_NAME
+FROM
+(
+{% for stream in streams_var %}
+    SELECT
+        '{{ stream }}' as streamName,
+        RECORD_CONTENT:data AS data
+    FROM {{ source(streams_schema, stream ) }}
+    {%- if not loop.last %} UNION ALL{% endif -%}
+{% endfor %}
+), LATERAL FLATTEN( INPUT => data ) AS e
+
+{%- endmacro -%}
+```
+
+The second macro is called `stream_model_latest_snapshot_macro`. It includes the logic to identify the latest state of every domain model object in the table, applying deletes when it finds them.
+
+```sql
+{%- macro stream_model_latest_snapshot_macro(streams_var, streams_schema='streams') -%}
+{%- set identityFields = var("overriddenIdentityFields") -%}
+
+WITH changeStream AS (
+{% for stream in streams_var %}
+    SELECT
+        '{{ stream }}' as streamName,
+        -- Need to alias the ID column here to a custom column if it's overridden in the variable
+        RECORD_CONTENT:data.{{ identityFields.get(stream,'id') }} AS idCol,
+        RECORD_METADATA:CreateTime AS createTime,
+        RECORD_CONTENT:changeType::STRING AS changeType,
+        RECORD_CONTENT:data AS data,
+        GET(RECORD_CONTENT,'deletedID') AS deletedID
+    FROM {{ source(streams_schema, stream ) }}
+    {%- if not loop.last %} UNION ALL{% endif -%}
+{% endfor %}
+),
+
+orderedStream AS (
+    SELECT
+        cs.*
+        -- flag rows whose id shows up in any DELETE event
+        , cs.idCol IN (SELECT deletedID FROM changeStream WHERE changeType = 'DELETE') AS isDeleted
+        , ROW_NUMBER() OVER (PARTITION BY cs.idCol ORDER BY cs.createTime DESC, cs.changeType DESC) AS LatestRow
+    FROM changeStream AS cs
+    WHERE changeType IN ('INSERT', 'UPDATE')
+),
+selectedStream AS (
+    SELECT
+        *
+    FROM orderedStream
+    WHERE LatestRow = 1
+)
+
+{%- endmacro -%}
+```
+
+The final macro is called `stream_model_macro`, and it coordinates the usage of the first two. Particularly, it uses [run_query()](https://docs.getdbt.com/reference/dbt-jinja-functions/run_query) to run the first macro, then uses the results to execute the final query, which leverages the second macro.
+
+```sql
+{%- macro stream_model_macro(streams_var, streams_schema='streams') -%}
+
+{%- set column_name_query -%}
+{{ stream_model_extract_columns_macro(streams_var, streams_schema) }}
+{%- endset -%}
+
+{%- set results = run_query(column_name_query) -%}
+
+{% if execute %}
+{# Return the first column #}
+{%- set column_names = results.columns[0].values() -%}
+{% else %}
+{%- set column_names = [] -%}
+{% endif %}
+
+{{ stream_model_latest_snapshot_macro(streams_var, streams_schema) }}
+,
+dynamicStream AS (
+    SELECT
+    {# rendering_a_new_line_in_sql_block_code #}
+    {%- for columns in column_names -%}
+    {{ ", " if not loop.first }}{{columns}}
+    {%- if not loop.last -%}
+    {# rendering_a_new_line_in_sql_block_code #}
+    {% endif %}
+    {%- endfor %}
+    FROM selectedStream AS e
+)
+SELECT * FROM dynamicStream
+
+{%- endmacro -%}
+```
+
+Now all we need to do is call the final macro in a dbt model and provide the list specified as a variable in `dbt_project.yml`. This file is in `src_container.sql`:
+
+```sql
+{{ stream_model_macro(var('container')) }}
+```
+
+In `src_container.yml` we explicitly list, and add tests for, the columns we expect to be associated with this model. This is the first time we introduce the actual column names anywhere in our dbt code.
+ +```yaml +--- +version: 2 + +models: + - name: src_container + description: pass the OMG model variable to generate the data + columns: + - name: templateName + description: STRING Specifies the templateName + tests: + - not_null + - name: complete + description: STRING Specifies the complete + - name: aggregateID + description: STRING Specifies the aggregateID + - name: recipientID + description: STRING Specifies the recipientID + - name: templateID + description: STRING Specifies the templateID + - name: templateType + description: STRING Specifies the templateType + - name: state + description: STRING Specifies the state + - name: id + description: STRING Specifies the id + - name: orgID +``` +```yaml +--- +version: 2 + +models: + - name: users + description: Lovely humans that use our app + columns: + - name: id + description: INT The id of this user + tests: + - not_null + - unique + - name: email + description: STRING User's contact email + tests: + - not_null + - name: state + description: STRING The current state of the user + tests: + - accepted_values: + values: + - "active" + - "invited" + - not_null +``` + +## Future ideas + +We learned a lot from both working with event streams and building these macros. + +One consideration that we haven’t discussed yet is [materialization](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/materializations) strategy. Since event stream tables are append-only, this is a natural fit for incremental models. At Merit, we haven’t worked much with incremental models, so we’re opting to start with views. As we roll this out to production models we’ll be doing a ton of performance testing to figure out the perfect materialization strategy for us. + +We also plan on adding a dbt test that alerts whenever the columns of any domain model table changes. This may indicate that an unexpected change has happened to an event schema, which could affect dashboards. + +These were certainly the most complicated dbt macros that we’ve built so far. This has inspired us to build a test framework to make sure that macros work as expected - including features like mocking run_query() calls. We’re considering open sourcing this framework - if you’re interested then let us know! + +## Let's talk! + +We’ve used dbt macros to transform event streams into tables so that we don’t need our data pipelines to rely directly on database schemas. I’ll be talking about this more at Coalesce 2022 - come check out my talk [Demystifying event streams: Transforming events into tables with dbt](https://coalesce.getdbt.com/agenda/demystifying-event-streams-transforming-events-into-tables-with-dbt). You can also reach out to me in the dbt slack (@Charlie Summers) or [LinkedIn](https://www.linkedin.com/in/charliesummers/). diff --git a/website/blog/2022-11-21-wasilas-foundry-experience.md b/website/blog/2022-11-21-wasilas-foundry-experience.md new file mode 100644 index 00000000000..ca28038fd4f --- /dev/null +++ b/website/blog/2022-11-21-wasilas-foundry-experience.md @@ -0,0 +1,53 @@ +--- +title: "A journey through the Foundry: Becoming an analytics engineer at dbt Labs" +description: "The Foundry Program is an apprenticeship at dbt Labs designed to turn data newbies into fully-fledged analytics engineers over the course of six months. As one of the inaugural foundry apprentices, Wasila shares in this blog post her journey into analytics engineering." 
+slug: wasila-foundry-experience + +authors: [wasila_quader] + +tags: [analytics craft] +hide_table_of_contents: false + +date: 2022-11-22 +is_featured: true +--- + +Data is [an industry of sidesteppers](https://analyticsengineers.club/data-education-is-broken/). Most folks in the field stumble into it, look around, and if they like what they see, they’ll build a career here. This is particularly true in the analytics engineering space. Every AE I’ve talked to had envisioned themselves doing something different before finding this work in a moment of serendipity. This raises the question: how can someone become an analytics engineer *intentionally*? This is the question [dbt Labs’ Foundry Program](https://www.getdbt.com/blog/announcing-the-foundry-program/) aims to address. + + + +## About the Foundry + +The Foundry Program is an apprenticeship designed to turn data newbies into fully-fledged analytics engineers over the course of six months. As one of the inaugural foundry apprentices, I’m here to share my journey into analytics engineering, along with the takeaways I picked up along the way. + +We’re continuing to improve the program with each iteration, but the curriculum for my cohort was split into two parts—three months of training followed by three months of hands-on work. + +## Where I started + +Before diving into the foundry experience, I’d like to tell you a bit about my background before dbt Labs. In my previous job, I had done some very basic work with data in Excel. Prior to dbt, I had also done a data science bootcamp. The first time I heard about analytics engineering was when I saw a post about the Foundry Program in Code for Philadelphia’s Slack channel. Even as someone who didn’t understand what analytics engineering was, I was struck by dbt Labs’ strong opinions about analytics and data: [there was a vision towards the future informed by lessons from the past](https://www.getdbt.com/blog/of-the-community-by-the-community-for-the-community/) (i.e. reflecting on the history of software engineering). There was a desire to optimize the way data was done, and transparency about the plan to get there. Where better to get my feet wet than a company committed to doing analytics in the best way possible? + +## The Foundry journey + +### Ramping up + +My first couple of weeks at dbt Labs were a whirlwind of information and discovery. Week two was when I began to understand what analytics engineering really meant: the organization of data. There was a lot to love about it; there was a promise of both the technical and the creative (the code and the problem solving). As someone who loves organizing, analytics engineering was a natural fit. It came with a [KonMari zen](https://docs.getdbt.com/blog/marie-kondo-query-migration). + +I had originally focused my job search on data analytics, but for me, analytics engineering was a much better fit. It felt less like reaching around for a lightbulb moment and more like building a library I can take pride in. + +As my knowledge of the “what” and “why” behind analytics engineering was growing, I started learning the “how”: SQL, Jinja, best practices, and the ins and outs of working in a dbt project. The best part was applying my knowledge to exercises. I remember going through my refactored code from an exercise with [Dave Connors](https://docs.getdbt.com/author/dave_connors), my foundry mentor (shout out to Dave! He was a huge help during my apprenticeship). 
Going through my modeling and the different ways an AE could refactor the code showed me the creative problem solving that this job requires. Often there’s a clear best path in the code. But sometimes there isn’t. Playing with those trade-offs made me feel like a kid in a candy store. + +Along the way, I was able to utilize some great resources. My apprenticeship was on our professional services team, which excelled not only in dbt work but in conveying an understanding of dbt and the analytics engineering way of thinking to our clients. Within our team as well as the larger dbt community, there was a culture of sharing perspectives, sharing solutions, and growing as a space. We have [guides on analytics engineering](https://www.getdbt.com/analytics-engineering/start-here), [articles on the MDS ecosystem](https://continual.ai/post/the-modern-data-stack-ecosystem-spring-2022-edition), and [a number of prolific writers sharing their latest takes on the field](https://roundup.getdbt.com/). These are invaluable resources for hopeful analytics engineers. + +Of course, I can’t talk about the Community without mentioning Coalesce. My first Coalesce came on the heels of the training section of my apprenticeship, right before I dove into real hands-on consulting work. It was amazing to see so many folks excited and engaged in analytics engineering. Talks ranged from getting-your-hands-dirty technical problems to reflections on the broader industry. [Coalesce 2021](https://www.getdbt.com/coalesce-2021/) reaffirmed for me that the real magic of this field wasn’t dbt, but the Community that had coalesced around it. + +### On the ground + +And then it was time for the real work. I was paired on projects with more senior team members. The need to prove myself gave rise to some imposter syndrome. Was I ready? Had I learned enough, and was I capable of applying the knowledge when it really came down to it? As is often the case when you shift from an academic application to a practical one, I found that there were challenges I hadn’t anticipated. + +The first project I worked on was a solutions review of a client’s project, where we review the project and suggest where it can be improved, as well as highlight where it shines. I was armed with dbt Labs’ best practices, but when I first opened up a DAG of over 200 models, I was overwhelmed and didn’t know where to start. That’s when I learned that context gathering (like going through the DAG and project before diving into the work) is a very important part of the job! In the long term, the contributions I made to those initial client engagements were the first step in growing my confidence. + +## Post-Foundry + +Once the foundry wrapped up, I was offered a permanent position on the professional services team! I continue to benefit from [the knowledge loop](https://github.com/dbt-labs/corp/blob/main/values.md#we-contribute-to-the-knowledge-loop), but now I’m also able to contribute to it. I’ve worked on more dbt projects. I’ve made package contributions. I’ve gone from being a starry-eyed Coalesce attendee to being a starry-eyed Coalesce attendee *and* [co-facilitating workshops at Coalesce](https://www.youtube.com/watch?v=W3CyTmVYro8). Over a year later, I can happily say that the Foundry Program brought me to where I wanted to be. + +If you’re looking for resources to help a hopeful analytics engineer (whether you are one or a manager of one), feel free to reach out to me on the community Slack (@Wasila)! 
\ No newline at end of file diff --git a/website/blog/2022-11-22-move-spreadsheets-to-your-dwh.md b/website/blog/2022-11-22-move-spreadsheets-to-your-dwh.md new file mode 100644 index 00000000000..67f217c76a4 --- /dev/null +++ b/website/blog/2022-11-22-move-spreadsheets-to-your-dwh.md @@ -0,0 +1,195 @@ +--- +title: "How to move data from spreadsheets into your data warehouse" +description: "A thankless, humble, and inevitable task: getting spreadsheet data into your data warehouse. Let's look at some of the different options, and the pros and cons of each." +slug: moving-spreadsheet-data + +authors: [joel_labes] + +tags: [analytics craft] +hide_table_of_contents: false + +date: 2022-11-23 +is_featured: true +--- + +Once your data warehouse is built out, the vast majority of your data will have come from other SaaS tools, internal databases, or customer data platforms (CDPs). But there’s another unsung hero of the analytics engineering toolkit: the humble spreadsheet. + +Spreadsheets are the Swiss army knife of data processing. They can add extra context to otherwise inscrutable application identifiers, be the only source of truth for bespoke processes from other divisions of the business, or act as the translation layer between two otherwise incompatible tools. + +Because of spreadsheets’ importance as the glue between many business processes, there are different tools to load them into your data warehouse, and each one has its own pros and cons, depending on your specific use case. + + + +In general, there are a few questions to ask yourself about your data before choosing one of these tools: + +- Who at your company will be loading the data? +- Does it have a consistent format? +- How frequently will it change? +- How big is the dataset? +- Do changes need to be tracked? +- Where are the files coming from? + +Let’s have a look at some of the offerings to help you get your spreadsheets into your data warehouse. + +## dbt seeds + +dbt comes with a built-in CSV loader ([seeds](https://docs.getdbt.com/docs/building-a-dbt-project/seeds)) to populate your data warehouse with any files you put inside your project’s `seeds` folder. It will automatically infer data types from your file’s contents, but you can always override the inferred types by [providing explicit instructions in your dbt_project.yml](https://docs.getdbt.com/reference/resource-configs/column_types) file (see the sketch below). + +However, since dbt creates these tables by inserting rows one at a time, it doesn’t perform well at scale (there’s no hard limit but aim for hundreds of rows rather than thousands). [The dbt docs](https://docs.getdbt.com/docs/building-a-dbt-project/seeds#faqs) suggest using seeds for “files that contain business-specific logic, for example, a list of country codes or user IDs of employees.” + +A big benefit of using seeds is that your file will be checked into source control, allowing you to easily see when the file was updated and retrieve deleted data if necessary. 
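+
+As a sketch of what that type override looks like (the project and seed names here are hypothetical):
+
+```yaml
+# dbt_project.yml
+seeds:
+  my_project:
+    country_codes:
+      +column_types:
+        country_code: varchar(2)
+        country_name: varchar(64)
+```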
+ +#### Good fit for: + +- Small files such as mapping employee identifiers to employees +- Infrequently modified files such as mapping country codes to country names +- Data that would benefit from source control +- Programmatic control of data types + +#### Not a good fit for: + +- Files greater than 1MB in size +- Files that need regular updates + +## ETL tools + +An obvious choice if you have data to load into your warehouse would be your existing [ETL tool](https://www.getdbt.com/analytics-engineering/etl-tools-a-love-letter/), such as Fivetran or Stitch, which I'll dive into in this section. Below is a summary table highlighting the core benefits and drawbacks of certain ETL tooling options for getting spreadsheet data into your data warehouse. + +### Summary table + +| Option/connector | Data updatable after load | Configurable data types | Multiple tables per schema | Good for large datasets | +| --- | --- | --- | --- | --- | +| dbt seeds | ✅ | ✅ | ✅ | ❌ | +| Fivetran Browser Upload | ✅ | ✅ | ✅ | ✅ | +| Fivetran Google Sheets connector | ✅ | ❌ | ❌ | ✅ | +| Fivetran Google Drive connector | ❌ | ❌ | ✅ | ✅ | +| Stitch Google Sheets integration | ✅ | ❌ | ❌ | ✅ | +| Airbyte Google Sheets connector | ✅ | ❌ | ❌ | ✅ | + +### Fivetran browser upload + +[Fivetran’s browser uploader](https://fivetran.com/docs/files/browser-upload) does exactly what it says on the tin: you upload a file to their web portal and it creates a table containing that data in a predefined schema in your warehouse. With a visual interface to modify data types, it’s easy for anyone to use. And with an account type whose only permission is uploading files, you don’t need to worry about your stakeholders accidentally breaking anything either. + + + + + +A nice benefit of the uploader is support for updating data in the table over time. If a file with the same name and same columns is uploaded, any new records will be added, and existing records (per the primary key) will be updated. + +However, keep in mind that there is no source control on these changes or a way to revert them; you might want to consider [snapshotting changes](https://docs.getdbt.com/docs/building-a-dbt-project/snapshots) in dbt if that’s a concern. + +Also, Fivetran won’t delete records once they’re created, so the only way to remove records created using this process is by manually [deleting](https://docs.getdbt.com/terms/dml#delete) them from your warehouse. If you have an ad-hoc connector, consider having an automated process to drop these tables regularly, especially if you have PII management concerns. + +#### Good fit for: + +- Files that are frequently updated by someone +- Allowing anyone in the company to upload files +- Ad-hoc data loads +- Updating a table instead of creating a new one +- Basic data type changes (including handling currency columns) +- Larger files + +#### Not a good fit for: + +- Tracking changes to data +- Complex type mappings + +### Fivetran Google Sheets connector + +The main benefit of connecting to Google Sheets instead of a static spreadsheet should be obvious—teammates can change the sheet from anywhere and new records will be loaded into your warehouse automatically. [Fivetran’s Google Sheets connector](https://fivetran.com/docs/files/google-sheets) requires some additional initial configuration, but collaborative editing can make the effort worthwhile. + +Instead of syncing all cells in a sheet, you create a [named range](https://fivetran.com/docs/files/google-sheets/google-sheets-setup-guide) and connect Fivetran to that range. 
Each Fivetran connector can only read a single range—if you have multiple tabs then you’ll need to create multiple connectors, each with its own schema and table in the target warehouse. When a sync takes place, it will [truncate](https://docs.getdbt.com/terms/ddl#truncate) and reload the table from scratch as there is no primary key to use for matching. + + + +Beware of inconsistent data types though—if someone types text into a column that was originally numeric, Fivetran will automatically convert the column to a string type, which might cause issues in your downstream transformations. [The recommended workaround](https://fivetran.com/docs/files/google-sheets#typetransformationsandmapping) is to explicitly cast your types in [staging models](https://docs.getdbt.com/guides/best-practices/how-we-structure/2-staging) to ensure that any undesirable records are converted to null (there's a sketch of this below). + +#### Good fit for: + +- Large, long-lived documents +- Files that are updated by many people (and somewhat often) + +#### Not a good fit for: + +- Ad-hoc loads—you need to create an entire schema for every connected spreadsheet, and preparing the sheet is a fiddly process +- Tracking changes to data +- Documents with many tabs + +### Fivetran Google Drive connector + +I’m a big fan of [Fivetran’s Google Drive connector](https://fivetran.com/docs/files/google-drive); in the past I’ve used it to streamline a lot of weekly reporting. It allows stakeholders to use a tool they’re already familiar with (Google Drive) instead of dealing with another set of credentials. Every file uploaded into a specific folder on Drive (or [Box, or consumer Dropbox](https://fivetran.com/docs/files/magic-folder)) turns into a table in your warehouse. + + + +As with the Google Sheets connector, the columns' data types are determined automatically. Dates, in particular, are finicky though—if you can control your input data, try to get it into [ISO 8601 format](https://xkcd.com/1179/) to minimize the amount of cleanup you have to do on the other side. + +I used two macros in the dbt_utils package ([get_relations_by_pattern](https://github.com/dbt-labs/dbt-utils#get_relations_by_pattern-source) and [union_relations](https://github.com/dbt-labs/dbt-utils#union_relations-source)) to combine weekly exports from other tools into a single [model](https://docs.getdbt.com/docs/building-a-dbt-project/building-models) for easy cleanup in a staging model. Make sure you grant your transformer account permission to access all tables in the schema (including future ones) to avoid having to manually intervene after every new file is uploaded. + +#### Good fit for: + +- Allowing anyone in the company to upload files +- Weekly exports from another tool +- Large files +- Many files (each will be created as another table in a single schema, unlike the Google Sheets integration) + +#### Not a good fit for: + +- Data that needs to be updated after load +- Custom type mappings (without further processing in dbt) + +### Stitch Google Sheets integration + +[The Google Sheets integration by Stitch](https://www.stitchdata.com/docs/integrations/saas/google-sheets) is a little more straightforward to set up than Fivetran’s, as it imports the entire sheet without requiring you to configure named ranges. Beyond that, it works in the same way, with the same benefits and the same drawbacks. 
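+
+Since the same type-casting advice applies to any of these sheet connectors, here's a minimal staging-model sketch using Snowflake's `try_cast`; the source and column names are hypothetical:
+
+```sql
+-- stg_budget_sheet.sql (sketch)
+select
+    -- try_cast returns null for values that don't parse as numbers,
+    -- so stray text in a numeric column won't break downstream models
+    try_cast(amount as number) as amount,
+    cast(category as varchar) as category
+from {{ source('google_sheets', 'budget_sheet') }}
+```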
+ +#### Good fit for: + +- Large, long-lived documents +- Files that are updated by many people + +#### Not a good fit for: + +- Ad-hoc loads—you need to create an entire schema for every connected spreadsheet +- Tracking changes to data +- Documents with many tabs + +### Airbyte Google Sheets connector + +Airbyte, an open-source and cloud ETL tool, [supports a Google Sheets source connector](https://airbytehq.github.io/integrations/sources/google-sheets/) very similar to Stitch’s and Fivetran’s integrations. You’ll need to authenticate your Google Account using OAuth or a service account key and provide the link to the Google Sheet you want to pull into your data warehouse. Note that all sheet columns are loaded as strings, so you will need to explicitly cast them in a downstream model. Airbyte’s connector also supports both full refreshes and appends. + +#### Good fit for: + +- Large, long-lived documents +- Files that are updated by many people +- Teams that may be on a budget + +#### Not a good fit for: + +- Non-string type data you want preserved in your raw source tables in your data warehouse + +## Native warehouse integrations + +Each of the major data warehouses also has native integrations to import spreadsheet data. While the fundamentals are the same, there are some differences amongst the various warehousing vendors. + +### Snowflake + +Snowflake’s options are robust and user-friendly, offering both a [web-based loader](https://docs.snowflake.com/en/user-guide/data-load-web-ui.html) and [a bulk importer](https://docs.snowflake.com/en/user-guide/data-load-bulk.html). The web loader is suitable for small to medium files (up to 50MB) and can be used for specific files, all files in a folder, or files in a folder that match a given pattern. It’s also the most provider-agnostic, with support for Amazon S3, Google Cloud Storage, Azure, and the local file system. + + + +### BigQuery + +BigQuery only supports importing data from external sources hosted by Google, such as Google Drive and Google Cloud Storage (as BigQuery and Sheets are both Google products, BigQuery is the only platform on this list that has a native integration that doesn't require 3rd-party tooling). The data it references isn’t copied into BigQuery but can be referenced in queries as though it were. If needed, you can write a copy to BigQuery or just leave it as an external source. The team at supercooldata has written [a great how-to guide on setting up Google Sheets with BigQuery](https://blog.supercooldata.com/working-with-sheets-in-bigquery/). + +### Redshift + +Unsurprisingly for an AWS product, Redshift prefers to [import CSV files from S3](https://docs.aws.amazon.com/redshift/latest/dg/tutorial-loading-data.html). As with Snowflake, this is achieved with the COPY command (see the sketch at the end of this section), and you can easily control which file(s) are imported from the source bucket. Using S3 as a source compared to a web-based loader or Google Drive means this option isn’t as user-friendly for non-technical folks, but is still a great option to sync files that are automatically generated from other tools. + +### Databricks + +Databricks also supports [pulling in data, such as spreadsheets, from external cloud sources](https://docs.databricks.com/external-data/index.html) like Amazon S3 and Google Cloud Storage. In addition, the ability to [load data via a simple UI](https://docs.databricks.com/ingestion/add-data/index.html) within Databricks is currently in public preview. 
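+
+To make the bulk-load path concrete, here's a rough sketch of a COPY-based load into Snowflake; the stage, bucket, and table names are placeholders, and Redshift's COPY follows a similar shape:
+
+```sql
+-- point a named stage at the bucket that holds the exported files
+create or replace stage spreadsheet_exports
+    url = 's3://example-bucket/spreadsheet-exports/'
+    credentials = (aws_key_id = '...' aws_secret_key = '...');
+
+-- load every CSV in the folder into a pre-created table, skipping header rows
+copy into raw.spreadsheets.weekly_export
+from @spreadsheet_exports
+pattern = '.*[.]csv'
+file_format = (type = 'csv' skip_header = 1);
+```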
+ +## Conclusion + +Beyond the options we’ve already covered, there’s an entire world of other tools that can load data from your spreadsheets into your data warehouse. This is a living document, so if your preferred method isn't listed then please [open a PR](https://github.com/dbt-labs/docs.getdbt.com) and I'll check it out. + +The most important things to consider are your files’ origins and formats—if you need your colleagues to upload files on a regular basis then try to provide them with a more user-friendly process; but if you just need two computers to talk to each other, or it’s a one-off file that will hardly ever change, then a more technical integration is totally appropriate. \ No newline at end of file diff --git a/website/blog/2022-11-30-dbt-project-evaluator.md b/website/blog/2022-11-30-dbt-project-evaluator.md new file mode 100644 index 00000000000..0ab3c5d2b31 --- /dev/null +++ b/website/blog/2022-11-30-dbt-project-evaluator.md @@ -0,0 +1,123 @@ +--- +title: "Introducing the dbt_project_evaluator: Automatically evaluate your dbt project for alignment with best practices " +description: "The dbt_project_evaluator is a dbt package created by the Professional Services team at dbt Labs to help analytics engineers automatically audit their dbt projects for bad practices. Goodbye auditing nightmares, hello beautiful DAG." +slug: align-with-dbt-project-evaluator + +authors: [grace_goheen] + +tags: [analytics craft] +hide_table_of_contents: false + +date: 2022-11-30 +is_featured: true +--- + +## Why we built this: A brief history of the dbt Labs Professional Services team + +If you attended [Coalesce 2022](https://www.youtube.com/watch?v=smbRwmcM1Ok), you’ll know that the secret is out — the dbt Labs Professional Services team is not just [a group of experienced data consultants](https://www.getdbt.com/dbt-labs/services/); we’re also an intergalactic group of aliens traveling the Milky Way on a mission to enable analytics engineers to successfully adopt and manage dbt throughout the galaxy. + + + +Don’t believe me??? Here’s photographic proof. + + + +Since the inception of dbt Labs, our team has been embedded with a variety of different data teams — from an over-stretched-data-team-of-one to a data-mesh-multiverse. + +Throughout these engagements, we began to take note of the common issues many analytics engineers face when scaling their dbt projects: + +- No alerts when data models produce incorrect outputs +- Long execution times when building or querying a model +- Duplicated code and differing metric definitions across teams +- Lack of knowledge of what a model or field represents +- Wasted developer time locating and reading through messy SQL files + +Maybe your team is facing some of these issues right now 👀 And that’s okay! We know that building an effective, scalable dbt project takes a lot of effort and brain power. Maybe you’ve inherited a legacy dbt project with a mountain of tech debt. Maybe you’re starting from scratch. Either way it can be difficult to know the best way to set your team up for success. Don’t worry, you’re in the right place! + +Through solving these problems over and over, the Professional Services team began to hone our best practices for working with dbt and how analytics engineers could improve their dbt project. 
We added “solutions reviews” to our list of service offerings — client engagements in which we evaluate a given dbt project and provide specific recommendations to improve performance, save developer time, and prevent misuse of dbt’s features. And in an effort to share these best practices with the wider dbt community, we developed a *lot* of content. We wrote articles on the Developer Blog (see [1](https://docs.getdbt.com/blog/on-the-importance-of-naming), [2](https://discourse.getdbt.com/t/your-essential-dbt-project-checklist/1377), and [3](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview)), gave [Coalesce talks](https://www.getdbt.com/coalesce-2020/auditing-model-layers-and-modularity-with-your-dag/), and created [training courses](https://courses.getdbt.com/courses/refactoring-sql-for-modularity). + +Time and time again, we found that when teams are aligned with these best practices, their projects are more: + +- **U**sable: Data outputs are reliable with proper alerting in place +- **F**ast: Jobs are more efficient without long-running model bottlenecks +- **O**rganized: Developers can quickly find, read, and understand the code they need to update +- **S**calable: No more "black holes": duplicated code is eliminated, allowing your project to grow with ease + +Even with all of these great resources, evaluating a dbt project still took considerable upfront development time to discover exactly where and how to apply these best practices. + +**That’s when we came up with a space-altering idea: what if we could compress all of our ideas about best practices into a single, actionable tool to automate the process of discovering these misalignments, so that analytics engineers could immediately understand exactly where their projects deviated from our best practices and *be empowered to improve their projects on their own*?** + +Flash forward through a six-month-long development process… + +The [dbt_project_evaluator](https://github.com/dbt-labs/dbt-project-evaluator) was born: a dbt package that uses the shared language of SQL, models, and tests to identify and assert specific recommendations for a given dbt project. + +## How the `dbt_project_evaluator` package works + +When you install and run this package in your own dbt project, it will: + +1. Convert the [graph](https://docs.getdbt.com/reference/dbt-jinja-functions/graph) object — which is a variable that contains information about the nodes in your dbt project — into a query-able table. This enables us to write SQL queries against a tabular representation of your DAG. +2. Capture each misalignment of an established “best practice” in a dbt model. +3. Test these new models to alert you to the presence of misalignments in your dbt project. + +Currently, the dbt_project_evaluator package covers five main categories: + +| Category | Example Best Practices | +| --- | --- | +| Modeling | - Every [raw source](https://docs.getdbt.com/docs/build/sources) has a one-to-one relationship with a [staging model](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview) to centralize data cleanup.<br />- Every model can be traced back to a declared source in the dbt project (i.e. no "root" models).<br />- End-of-DAG fanout remains under a specified threshold. | +| Testing | - Every model has a primary key that is appropriately tested.<br />- The percentage of models that have minimum 1 test applied is greater than or equal to a specified threshold. | +| Documentation | - Every model has a [description](https://docs.getdbt.com/reference/resource-properties/description).<br />- The percentage of models that have a description is greater than or equal to a specified threshold. | +| Structure | - All models are named with the appropriate prefix aligned according to their model types (e.g. staging models are prefixed with `stg_`).<br />- The sql file for each model is in the subdirectory aligned with the model type (e.g. intermediate models are in an [intermediate subdirectory](https://docs.getdbt.com/guides/best-practices/how-we-structure/3-intermediate)).<br />- Each models subdirectory contains one .yml file that includes tests and documentation for all models within the given subdirectory. | +| Performance | - Every model that directly feeds into an [exposure](https://docs.getdbt.com/docs/build/exposures) is materialized as a table.<br />- No models are dependent on chains of "non-physically-materialized" models greater than a specified threshold. | + +For the full up-to-date list of covered rules, check out the package’s [README](https://github.com/dbt-labs/dbt-project-evaluator#rules-1), which outlines, for each misalignment of a best practice: + +- Definition and clarifying example +- Reason for flagging the misalignment +- Any known exceptions to the rule +- How to remediate the issue + +There might be specific situations where you need to depart from our best practices. *That’s actually okay*, as long as you’ve reviewed the misalignment and made the active choice to do something different. We built this tool with simple mechanisms to customize the package behavior, including: + +- Disabling a package model to exclude a best practice from the entire evaluation process +- Overriding variables to adjust *how* a best practice is evaluated +- Documenting specific project exceptions to a best practice in a seed file + +For instructions and code snippets for each customization method, check out the [README](https://github.com/dbt-labs/dbt-project-evaluator#customization-1). + +## Try it out! + +To try out the package in your own project: + +1. **Install the package**: Check [dbt Hub](https://hub.getdbt.com/dbt-labs/dbt_project_evaluator/latest/) for the latest installation instructions, or read [the docs](https://docs.getdbt.com/docs/build/packages) for more information on installing packages. +2. **Run and test all of the models in the package**: Execute a `dbt build --select package:dbt_project_evaluator` command. +3. **Identify any warnings**: Each test warning indicates the presence of a type of misalignment. + +For *each warning* that pops up: + +1. Identify the model name. +2. Locate the related documentation in the package [README](https://github.com/dbt-labs/dbt-project-evaluator#rules-1). +3. Query the model to find the specific instances of the issue within your project. +4. Either fix the issue(s) or [customize](https://github.com/dbt-labs/dbt-project-evaluator#customization-1) the package to exclude the issue(s). + +In order to automatically maintain project quality as your team expands, you can enforce alignment with dbt Labs’ best practices on all future code changes by [adding this package as a CI check](https://github.com/dbt-labs/dbt-project-evaluator#running-this-package-as-a-ci-check-1). Every time one of your team members (or yourself) opens a PR, the CI check will automatically ensure that new code changes don’t introduce new misalignments. + +You can think of this as “linting” your dbt project to keep it aligned with our best practices — in the same way you might lint your SQL code to keep it aligned with your style guide. + +To add this package as a CI check: + +1. Override the severity of your tests using an [environment variable](https://docs.getdbt.com/docs/build/environment-variables) (see the sketch below). +2. Run this package as a step in your CI job. + +To watch a full demo of using this package in greater detail, make sure to check out [my Coalesce talk below](https://youtu.be/smbRwmcM1Ok) [demo starts at 7:35]. 
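+
+As a sketch of step 1 above, the package README suggests wiring the tests' severity to an environment variable along these lines, so they warn by default but fail the build when CI sets the variable to `error`:
+
+```yaml
+# dbt_project.yml: set DBT_PROJECT_EVALUATOR_SEVERITY=error in the CI environment
+tests:
+  dbt_project_evaluator:
+    +severity: "{{ env_var('DBT_PROJECT_EVALUATOR_SEVERITY', 'warn') }}"
+```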
+ +
+ +
+ +If something isn’t working quite right or you have ideas for future functionality, [open an issue in the GitHub repository](https://github.com/dbt-labs/dbt-project-evaluator/issues) or even contribute code of your own! + +Together, we can ensure that dbt projects across the galaxy are set up for success as they grow to infinity and beyond. + + \ No newline at end of file diff --git a/website/blog/authors.yml b/website/blog/authors.yml index d2b2b3c7f67..b8437601b5b 100644 --- a/website/blog/authors.yml +++ b/website/blog/authors.yml @@ -97,7 +97,7 @@ claire_carroll: dave_connors: name: Dave Connors - job_title: Analytics Engineer + job_title: Senior Analytics Engineer organization: dbt Labs image_url: /img/blog/authors/dconnors.jpeg links: @@ -181,7 +181,7 @@ simon_podhajsky: job_title: Data Lead organization: iLife Technologies description: > - Simon Podhajsky is a lapsed neuroscientist turned data everything at iLife + Simon Podhajsky is a lapsed neuroscientist turned data everything at iLife Technologies, a startup that seeks to digitize the life insurance agency. image_url: /img/blog/authors/simon-podhajsky.jpeg links: @@ -207,7 +207,7 @@ josh_fell: links: - url: https://www.linkedin.com/in/josh-fell/ icon: fa-linkedin - + simo_tumelius: name: Simo Tumelius job_title: Freelance Data and Analytics Engineer @@ -215,7 +215,7 @@ simo_tumelius: links: - url: https://www.linkedin.com/in/simo-tumelius-00a27a162/ icon: fa-linkedin - + matt_winkler: name: Matt Winkler job_title: Senior Solutions Architect @@ -225,6 +225,18 @@ matt_winkler: links: - url: https://www.linkedin.com/in/matt-winkler-4024263a/ +jonathan_natkins: + name: Jon "Natty" Natkins + job_title: Regional Director, Solutions Architecture + organization: dbt Labs + description: Natty also writes about startups, equity, data, and more in his Substack called [Semi-Structured](http://semistructured.substack.com/). + image_url: /img/blog/authors/jonathan-natkins.jpeg + links: + - url: https://www.linkedin.com/in/nattyice/ + icon: fa-linkedin + - url: https://twitter.com/nattyice + icon: fa-twitter + lauren_benezra: name: Lauren Benezra job_title: Analytics Engineer @@ -239,3 +251,122 @@ christine_berger: job_title: Senior Analytics Engineer organization: dbt Labs image_url: /img/blog/authors/christine-berger.jpeg + +grace_goheen: + name: Grace Goheen + job_title: Analytics Engineer + organization: dbt Labs + image_url: /img/blog/authors/grace-goheen.jpeg + links: + - url: https://www.linkedin.com/in/gracegoheen/ + icon: fa-linkedin + +jeremy_cohen: + name: Jeremy Cohen + job_title: Product Manager + organization: dbt Labs + image_url: /img/blog/authors/jerco.jpeg + +doug_beatty: + name: Doug Beatty + job_title: Senior Developer Experience Advocate + organization: dbt Labs + image_url: /img/blog/authors/dbeatty.jpeg + +callum_mccann: + name: Callum McCann + job_title: Senior Developer Experience Advocate + organization: dbt Labs + description: Callum works on metrics and is either talking about that or obsessing about his dog. 
+ image_url: /img/blog/authors/callum-mccann.jpg + links: + - url: https://www.linkedin.com/in/callum-mccann-38628a89/ + icon: fa-linkedin + - url: https://twitter.com/callumpmccann + icon: fa-twitter + +benoit_perigaud: + name: Benoit Perigaud + job_title: Senior Analytics Engineer + organization: dbt Labs + image_url: /img/blog/authors/benoit-perigaud.jpeg + links: + - url: https://www.linkedin.com/in/benoit-perigaud/ + icon: fa-linkedin + +bennie_regenold: + name: Bennie Regenold + job_title: Analytics Engineer + organization: dbt Labs + image_url: /img/blog/authors/bennie-regenold.png + links: + - url: https://www.linkedin.com/in/benjaminregenold/ + icon: fa-linkedin + +barr_yaron: + name: Barr Yaron + job_title: Product Manager + organization: dbt Labs + image_url: /img/blog/authors/barr-yaron.png + links: + - url: https://www.linkedin.com/in/barryaron/ + icon: fa-linkedin + +ian_fahey: + name: Ian Fahey + job_title: Analytics Engineer + organization: dbt Labs + image_url: /img/blog/authors/ian-fahey.png + links: + - url: https://www.linkedin.com/in/ianmfahey/ + icon: fa-linkedin + - url: https://twitter.com/Cavorax + icon: fa-twitter + +joe_markiewicz: + name: Joe Markiewicz + job_title: Analytics Engineering Manager (Fivetran dbt package maintainer) + description: Joe is a dbt package maintainer/manager at Fivetran by day, and a cat obsessed video game developer by night. Actually, Joe is cat obsessed all day. + organization: Fivetran + image_url: /img/blog/authors/joe-markiewicz.jpeg + links: + - url: https://www.linkedin.com/in/joseph-markiewicz-8224a990/ + icon: fa-linkedin + - url: https://twitter.com/JoeMarkiewicz17 + icon: fa-twitter + +yu_ishikawa: + name: Yu Ishikawa + job_title: Senior Data Privacy Engineer + organization: Ubie + image_url: /img/blog/authors/yu-ishikawa.jpg + links: + - url: https://www.linkedin.com/in/yuishikawa0301 + icon: fa-linkedin + +brittany_krauth: + name: Brittany Krauth + job_title: Manager, Analytics & Insights + description: Brittany Krauth works as Manager, Analytics & Insights supporting Degreed's upskilling platform. Brittany is passionate about building a company-wide data-driven culture. She has worked in various analytical roles, from a focus on predictive analytics to data visualization to process improvements. In addition, she holds a BS in Industrial Engineering from Georgia Tech. In her spare time, Brittany competes in dog agility and trains donkeys. + organization: Degreed + image_url: /img/blog/authors/brittany-krauth.png + links: + - url: https://www.linkedin.com/in/brittanykrauth + icon: fa-linkedin + +charlie_summers: + name: Charlie Summers + job_title: Staff Software Engineer + description: Charlie is the Data Engineer Tech Lead at Merit. He introduced Merit to dbt and it's been a fantastic fit for a wide variety of data pipelines. He likes thinking about the future of data - integrating event streams, analyzing encrypted data, capturing fine-grained lineage, and making it easy to build simple apps on top of data warehouses/lakes. + organization: Merit + image_url: /img/blog/authors/charlie-summers.jpeg + links: + - url: https://www.linkedin.com/in/charliesummers + icon: fa-linkedin + +wasila_quader: + name: Wasila Quader + job_title: Associate Analytics Engineer + description: After a winding road through healthcare spreadsheets and data science projects, Wasila discovered analytics engineering as an apprentice of dbt Labs' Foundry Program. She now works as an analytics engineer on dbt Labs' professional services team. 
organization: dbt Labs + image_url: /img/blog/authors/wasila-quader.png diff --git a/website/blog/categories.yml b/website/blog/categories.yml index cd486b0bdef..2a45e6529e2 100644 --- a/website/blog/categories.yml +++ b/website/blog/categories.yml @@ -15,10 +15,6 @@ display_title: dbt tutorials description: Best practices in the usage of our favorite data transformation tool. is_featured: true -- name: release notes - display_title: Release notes - description: Notable updates and new features in dbt Cloud. - is_featured: true - name: dbt updates display_title: dbt product updates description: An archive of monthly product updates from the dbt Labs team. diff --git a/website/blog/ctas.yml b/website/blog/ctas.yml index 1d819628275..2267b05a42a 100644 --- a/website/blog/ctas.yml +++ b/website/blog/ctas.yml @@ -9,5 +9,4 @@ header: "Just Getting Started?" subheader: Check out guides on getting your warehouse set up and connected to dbt Cloud. button_text: Learn more - url: https://docs.getdbt.com/tutorial/getting-set-up - + url: https://docs.getdbt.com/docs/get-started/getting-started/overview diff --git a/website/blog/maching-learning-dbt-baton-pass.md b/website/blog/maching-learning-dbt-baton-pass.md index 9eed5238009..7046ecb296a 100644 --- a/website/blog/maching-learning-dbt-baton-pass.md +++ b/website/blog/maching-learning-dbt-baton-pass.md @@ -42,7 +42,7 @@ This happens because the “normal” way of doing things lacks long-term & expl ### Here’s what happened -After some initial planning, I knew we had this raw data living somewhere in our data warehouse. It was easy to make sense of this starting point for our work together. I wrote dbt transformations to massage this raw data and joined a couple tables together based on intuition of what variables mattered: daily active usage, number of users, amount paid, historical usage, etc. +After some initial planning, I knew we had this raw data living somewhere in our <Term id="data-warehouse" />. It was easy to make sense of this starting point for our work together. I wrote dbt transformations to massage this raw data and joined a couple tables together based on intuition of what variables mattered: daily active usage, number of users, amount paid, historical usage, etc. The ML engineer stepped in from here. She was used to doing her statistics and preprocessing in python [pandas](https://pandas.pydata.org/) and [scikit-learn](https://scikit-learn.org/stable/index.html). Before she opened up her Jupyter notebook, we had a heart-to-heart conversation and realized the same work could be done through dbt. Preprocessing could be done through this [open source dbt package](https://github.com/omnata-labs/dbt-ml-preprocessing/tree/1.1.0/#dbt-ml-preprocessing) and there were plenty of others like it in the [package registry](https://hub.getdbt.com/). @@ -106,7 +106,7 @@ Also, I would create a data app (in Hex) where users plug in different input sce #### What are the tradeoffs? -I’d still have to export my predictive results back to the database and configure them as sources for dbt docs(depends if Modelbit is involved). People wouldn’t know at a glance the data lineage to power this notebook. But my gut tells me the tradeoff would be worth it because the ML engineer knows where to start problem solving even if the solution wasn’t readily available through SQL. +I’d still have to export my predictive results back to the database and configure them as sources for dbt docs (depends if Modelbit is involved). People wouldn’t know at a glance the data lineage to power this notebook. 
But my gut tells me the tradeoff would be worth it because the ML engineer knows where to start problem solving even if the solution wasn’t readily available through SQL. ### Bring machine learning to the SQL workflow diff --git a/website/cypress.config.js b/website/cypress.config.js new file mode 100644 index 00000000000..0ce0dfeda3c --- /dev/null +++ b/website/cypress.config.js @@ -0,0 +1,12 @@ +const { defineConfig } = require("cypress"); + +module.exports = defineConfig({ + e2e: { + setupNodeEvents(on, config) { + // implement node event listeners here + }, + baseUrl: 'https://docs.getdbt.com', + chromeWebSecurity: false, + video: false + }, +}); diff --git a/website/cypress/e2e/docs.cy.js b/website/cypress/e2e/docs.cy.js new file mode 100644 index 00000000000..50f0ba09dae --- /dev/null +++ b/website/cypress/e2e/docs.cy.js @@ -0,0 +1,543 @@ +describe('docs.getdbt.com docs tab', () => { + before(function () { + Cypress.on('uncaught:exception', (err, runnable) => { + // returning false here prevents Cypress from + // failing the test + return false; + }); + }); + + beforeEach(() => { + cy.visit('/docs/introduction'); + }); + + it('verifies all the introduction page links work and go to the correct pages', () => { + cy.get(':nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/supported-data-platforms`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get(':nth-child(1) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/core-versions`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the building a dbt project page links work and go to the correct pages', () => { + cy.get('.theme-doc-sidebar-menu > :nth-child(2) > :nth-child(1) > .menu__link').click() + cy.get(':nth-child(2) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/projects`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get(':nth-child(2) > .menu__list > .theme-doc-sidebar-item-category > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > .theme-doc-sidebar-item-category > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/building-models`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > .theme-doc-sidebar-item-category > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/building-models/materializations`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > .theme-doc-sidebar-item-category > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/building-models/configuring-incremental-models`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > .theme-doc-sidebar-item-category > .menu__list > :nth-child(4) > 
.menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/building-models/using-custom-aliases`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > .theme-doc-sidebar-item-category > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/building-models/using-custom-schemas`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > .theme-doc-sidebar-item-category > .menu__list > :nth-child(6) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/building-models/using-custom-databases`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > .theme-doc-sidebar-item-category > .menu__list > :nth-child(7) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/building-models/using-variables`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/tests`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/documentation`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/using-sources`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(6) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/seeds`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(7) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/snapshots`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(8) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/exposures`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(9) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/jinja-macros`, 
`${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(10) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/hooks-operations`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(11) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/package-management`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(12) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/analyses`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 350ms ease-in-out 0s;"] > :nth-child(13) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/building-a-dbt-project/metrics`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the running a dbt project page links work and go to the correct pages', () => { + cy.get(':nth-child(3) > .menu__list-item-collapsible > .menu__link').click() + cy.get(':nth-child(3) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/running-a-dbt-project/using-the-dbt-ide`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get(':nth-child(3) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/running-a-dbt-project/using-the-cli`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get(':nth-child(3) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/running-a-dbt-project/dbt-api`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get(':nth-child(3) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/running-a-dbt-project/running-dbt-in-production`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the contributing page links work and go to the correct pages', () => { + cy.get(':nth-child(4) > .menu__list-item-collapsible > .menu__link').click() + cy.get(':nth-child(4) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/contributing/oss-expectations`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get(':nth-child(4) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/contributing/contributor-license-agreements`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get(':nth-child(4) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/contributing/building-a-new-adapter`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get(':nth-child(4) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/contributing/testing-a-new-adapter`, `${Cypress.config('baseUrl')}/docs/introduction`) + + 
cy.get(':nth-child(4) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/contributing/documenting-a-new-adapter`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get(':nth-child(4) > .menu__list > :nth-child(6) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/contributing/slack-rules-of-the-road`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the about page links work and go to the correct pages', () => { + cy.get(':nth-child(5) > .menu__list-item-collapsible > .menu__link').click() + cy.get(':nth-child(5) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/about/license`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get(':nth-child(5) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/about/viewpoint`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - accounts page links work and go to the correct pages', () => { + // frequently asked questions collapsible section takes the user + // to an index page + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(1) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(1) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Accounts/change-billing`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(1) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Accounts/configurable-snapshot-path`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(1) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Accounts/dbt-specific-jinja`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(1) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Accounts/git-account-in-use`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(1) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Accounts/payment-accepted`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(1) > .menu__list > :nth-child(6) > 
.menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Accounts/slack`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - core page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(2) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(2) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Core/install-pip-best-practices.md`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(2) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Core/install-pip-os-prereqs.md`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(2) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Core/install-python-compatibility`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - docs page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(3) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(3) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Docs/document-all-columns`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(3) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Docs/document-other-resources`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(3) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Docs/documenting-macros`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(3) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Docs/long-descriptions`, 
`${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(3) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Docs/sharing-documentation`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - environments page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(4) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(4) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Environments/beta-release`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(4) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Environments/diff-database-environment`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(4) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Environments/profile-env-vars`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(4) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Environments/profile-name`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(4) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Environments/target-names`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - git page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(5) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(5) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Git/gitignore`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; 
will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(5) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Git/gitlab-authentication`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(5) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Git/gitlab-selfhosted`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(5) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Git/google-cloud-repo`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(5) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Git/managed-repo`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(5) > .menu__list > :nth-child(6) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Git/run-on-pull`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - jinja page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(6) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Jinja/jinja-whitespace`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(6) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Jinja/quoting-column-names`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(6) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Jinja/which-jinja-docs`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - models page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; 
transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/available-configurations`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/available-materializations`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/configurable-model-path`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/create-a-schema`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/create-dependencies`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(6) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/insert-records`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(7) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/model-custom-schemas`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(8) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/reference-models-in-another-project`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(9) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/removing-deleted-models`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(10) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/run-downtime`, `${Cypress.config('baseUrl')}/docs/introduction`) 
+ + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(11) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/source-quotes`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(12) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/specifying-column-types`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(13) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/sql-dialect`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(7) > .menu__list > :nth-child(14) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Models/unique-model-names`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - project page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/dbt-source-freshness`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/debugging-jinja`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/define-a-column-type`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/docs-for-multiple-projects`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/example-projects`, 
`${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(6) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/exclude-table-from-freshness`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(7) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/multiple-resource-yml-files`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(8) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/project-name`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(9) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/properties-not-in-config`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(10) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/resource-yml-name`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(11) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/schema-yml-name`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(12) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/separate-profile`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(13) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/source-has-bad-name`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(14) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/source-in-different-database`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(15) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/structure-a-project`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: 
visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(16) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/which-materialization`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(17) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/which-schema`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(18) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/why-not-write-dml`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(19) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/why-so-many-macros`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(20) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/why-version-2`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(8) > .menu__list > :nth-child(21) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Project/yaml-file-extension`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - runs page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(9) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(9) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Runs/checking-logs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(9) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Runs/failed-prod-run`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(9) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Runs/failed-tests`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: 
block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(9) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Runs/run-downstream-of-seed`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(9) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Runs/run-one-model`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(9) > .menu__list > :nth-child(6) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Runs/run-one-snapshot`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(9) > .menu__list > :nth-child(7) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Runs/running-model-downstream-of-source`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(9) > .menu__list > :nth-child(8) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Runs/snapshot-frequency`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - seeds page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(10) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(10) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Seeds/build-one-seed`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(10) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Seeds/full-refresh-seed`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(10) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Seeds/leading-zeros-in-seed`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(10) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Seeds/load-raw-data-with-seed`, 
`${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(10) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Seeds/seed-custom-schemas`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(10) > .menu__list > :nth-child(6) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Seeds/seed-datatypes`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(10) > .menu__list > :nth-child(7) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Seeds/seed-hooks`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - snapshots page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(11) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(11) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Snapshots/snapshot-hooks`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(11) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Snapshots/snapshot-schema-changes`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(11) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Snapshots/snapshot-target-schema`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(11) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Snapshots/snapshotting-freshness-for-one-source`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - tests page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: 
visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Tests/available-tests`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Tests/configurable-data-path`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Tests/configurable-data-test-path`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Tests/custom-test-thresholds`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Tests/recommended-tests`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list > :nth-child(6) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Tests/test-one-model`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list > :nth-child(7) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Tests/testing-seeds`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list > :nth-child(8) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Tests/testing-sources`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list > :nth-child(9) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Tests/uniqueness-two-columns`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(12) > .menu__list > :nth-child(10) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Tests/when-to-test`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - troubleshooting page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > 
.menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/access-gdrive-credential`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/access_token_error`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/dispatch-could-not-find-package`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/git-revlist-error`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/gitignore`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(6) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/gitlab-authentication`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(7) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/nonetype-ide-error`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(8) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/partial-parsing-error`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(9) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/runtime-error-could-not-find-profile`, `${Cypress.config('baseUrl')}/docs/introduction`) + + 
cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(10) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/runtime-packages.yml`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(11) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/sql-errors`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(13) > .menu__list > :nth-child(12) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Troubleshooting/unused-model-configurations`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) + + it('verifies all the frequently asked questions - warehouse page links work and go to the correct pages', () => { + cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(14) > .menu__list-item-collapsible > .menu__link').click() + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(14) > .menu__list > :nth-child(1) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Warehouse/bq-impersonate-service-account-setup`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(14) > .menu__list > :nth-child(2) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Warehouse/bq-impersonate-service-account-why`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(14) > .menu__list > :nth-child(3) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Warehouse/connecting-to-two-dbs-not-allowed`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(14) > .menu__list > :nth-child(4) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Warehouse/database-privileges`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(14) > .menu__list > :nth-child(5) > .menu__link').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Warehouse/loading-data`, `${Cypress.config('baseUrl')}/docs/introduction`) + + cy.get('[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"] > :nth-child(14) > .menu__list > :nth-child(6) > .menu__link').click() + 
cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/Warehouse/sample-profiles`, `${Cypress.config('baseUrl')}/docs/introduction`) + }) +}) \ No newline at end of file diff --git a/website/cypress/e2e/homepage.cy.js b/website/cypress/e2e/homepage.cy.js new file mode 100644 index 00000000000..6ffee3454f0 --- /dev/null +++ b/website/cypress/e2e/homepage.cy.js @@ -0,0 +1,61 @@ +const sizes = ['iphone-x', [768, 1024], [1280, 720]] + +describe('docs.getdbt.com homepage', () => { + before(function () { + Cypress.on('uncaught:exception', (err, runnable) => { + // returning false here prevents Cypress from + // failing the test + return false; + }); + }); + + beforeEach(() => { + cy.visit('/'); + }); + + sizes.forEach((size) => { + it('verifies all the button links work and go to the correct page', () => { + if (Cypress._.isArray(size)) { + cy.viewport(size[0], size[1]) + } else { + cy.viewport(size) + } + + cy.get('[style="max-width:var(--ifm-container-width);margin:calc(2vh) auto calc(2vh)"] > :nth-child(1) > .card > .card__footer > .button').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/introduction`, `${Cypress.config('baseUrl')}/`) + + cy.get('[style="max-width:var(--ifm-container-width);margin:calc(2vh) auto calc(2vh)"] > :nth-child(2) > .card > .card__footer > .button').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/guides/getting-started`, `${Cypress.config('baseUrl')}/`) + + cy.get('[style="max-width:var(--ifm-container-width);margin:calc(2vh) auto calc(2vh)"] > :nth-child(3) > .card > .card__footer > .button').invoke('removeAttr', 'target').click() + cy.checkLinksNotBroken('https://www.getdbt.com/dbt-learn/', `${Cypress.config('baseUrl')}/`) + + cy.get(':nth-child(2) > :nth-child(2) > :nth-child(1) > .card > .card__footer > .button').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/introduction`, `${Cypress.config('baseUrl')}/`) + + cy.get(':nth-child(2) > :nth-child(2) > :nth-child(2) > .card > .card__footer > .button').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/reference/dbt_project.yml`, `${Cypress.config('baseUrl')}/`) + + cy.get(':nth-child(2) > :nth-child(2) > :nth-child(3) > .card > .card__footer > .button').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/`) + + cy.get(':nth-child(4) > :nth-child(1) > .card > .card__footer > .button').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/dbt-cloud/cloud-overview`, `${Cypress.config('baseUrl')}/`) + + cy.get(':nth-child(4) > :nth-child(2) > .card > .card__footer > .button').click() + cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/dbt-cloud/dbt-cloud-api/cloud-apis`, `${Cypress.config('baseUrl')}/`) + + cy.get(':nth-child(2) > :nth-child(1) > .card > .card__footer > .button').contains('Get Advice').invoke('removeAttr', 'target').click() + cy.checkLinksNotBroken('https://discourse.getdbt.com/', `${Cypress.config('baseUrl')}/`) + + cy.get(':nth-child(2) > :nth-child(2) > .card > .card__footer > .button').contains('Join us on Slack').then(($button => { + cy.wrap($button).should('have.attr', 'href').and('eq', 'http://community.getdbt.com/') + cy.wrap($button).invoke('removeAttr', 'target').click() + })) + cy.checkLinksNotBroken('https://www.getdbt.com/community/', `${Cypress.config('baseUrl')}/`) + + cy.get(':nth-child(2) > :nth-child(3) > .card > .card__footer > .button').contains('View Projects').click() + 
cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/project/example-projects`, `${Cypress.config('baseUrl')}/`)
+    })
+  })
+})
\ No newline at end of file
diff --git a/website/cypress/support/commands.js b/website/cypress/support/commands.js
new file mode 100644
index 00000000000..ed44b0f8810
--- /dev/null
+++ b/website/cypress/support/commands.js
@@ -0,0 +1,35 @@
+// ***********************************************
+// This example commands.js shows you how to
+// create various custom commands and overwrite
+// existing commands.
+//
+// For more comprehensive examples of custom
+// commands please read more here:
+// https://on.cypress.io/custom-commands
+// ***********************************************
+//
+//
+// -- This is a parent command --
+// Cypress.Commands.add('login', (email, password) => { ... })
+//
+//
+// -- This is a child command --
+// Cypress.Commands.add('drag', { prevSubject: 'element'}, (subject, options) => { ... })
+//
+//
+// -- This is a dual command --
+// Cypress.Commands.add('dismiss', { prevSubject: 'optional'}, (subject, options) => { ... })
+//
+//
+// -- This will overwrite an existing command --
+// Cypress.Commands.overwrite('visit', (originalFn, url, options) => { ... })
+// Yields the element matching the given data-testid attribute.
+Cypress.Commands.add('byTestId', (testId) => cy.get(`[data-testid=${testId}]`));
+// Asserts that a click landed on desiredPage (and not a 404), then navigates
+// back and confirms the browser returned to originalPage.
+Cypress.Commands.add('checkLinksNotBroken', (desiredPage, originalPage) => {
+  cy.url().should('eq', desiredPage)
+  cy.get('body').should('not.contain', 'Page Not Found')
+  cy.go('back')
+  cy.url().should('eq', originalPage)
+})
diff --git a/website/cypress/support/e2e.js b/website/cypress/support/e2e.js
new file mode 100644
index 00000000000..0e7290a13d9
--- /dev/null
+++ b/website/cypress/support/e2e.js
@@ -0,0 +1,20 @@
+// ***********************************************************
+// This example support/e2e.js is processed and
+// loaded automatically before your test files.
+//
+// This is a great place to put global configuration and
+// behavior that modifies Cypress.
+//
+// You can change the location of this file or turn off
+// automatically serving support files with the
+// 'supportFile' configuration option.
+// +// You can read more here: +// https://on.cypress.io/configuration +// *********************************************************** + +// Import commands.js using ES2015 syntax: +import './commands' + +// Alternatively you can use CommonJS syntax: +// require('./commands') \ No newline at end of file diff --git a/website/dbt-global-variables.js b/website/dbt-global-variables.js index 608cd246a4e..8ee4499151e 100644 --- a/website/dbt-global-variables.js +++ b/website/dbt-global-variables.js @@ -16,5 +16,17 @@ exports.dbtVariables = { "version": "0.21" } ] - } + }, + dbtTheProduct: { + name: "dbt" + }, + dbtCore: { + name: "dbt Core" + }, + dbtCloud: { + name: "dbt Cloud" + }, + dbtIDE: { + name: "dbt Cloud IDE" + }, } diff --git a/website/dbt-versions.js b/website/dbt-versions.js index 27cb1f821ef..03f2721e42d 100644 --- a/website/dbt-versions.js +++ b/website/dbt-versions.js @@ -1,11 +1,19 @@ exports.versions = [ + { + version: "1.3", + EOLDate: "2023-10-12", + }, + { + version: "1.2", + EOLDate: "2023-07-26", + }, { version: "1.1", - EOLDate: "2023-04-28", // TODO estimated for now + EOLDate: "2023-04-28", }, { version: "1.0", - EOLDate: "2023-12-03" + EOLDate: "2022-12-03" }, { version: "0.21", @@ -19,11 +27,23 @@ exports.versions = [ exports.versionedPages = [ { - "page": "docs/contributing/testing-a-new-adapter", - "firstVersion": "1.1", + "page": "reference/dbt-jinja-functions/set", + "firstVersion": "1.2", }, { - "page": "docs/guides/migration-guide/upgrading-to-v1.1", + "page": "reference/dbt-jinja-functions/zip", + "firstVersion": "1.2", + }, + { + "page": "reference/dbt-jinja-functions/cross-database-macros", + "firstVersion": "1.2", + }, + { + "page": "reference/resource-configs/grants", + "firstVersion": "1.2", + }, + { + "page": "docs/contributing/testing-a-new-adapter", "firstVersion": "1.1", }, { diff --git a/website/docs/community/contribute.md b/website/docs/community/contribute.md new file mode 100644 index 00000000000..037edf0e430 --- /dev/null +++ b/website/docs/community/contribute.md @@ -0,0 +1,47 @@ +--- +title: Become a contributor +id: contribute +--- + +
+
+## Want to get involved? Start here
+
+The dbt Community predates dbt Labs as an organization and harkens back to the days when a scrappy analytics consultancy of a few [pissed off data analysts]() started hacking together an open source project, around which gathered a community that would change how the world uses data.
+
+The dbt Community exists to allow analytics practitioners to share their knowledge, help others, and collectively drive forward the discipline of analytics engineering. This is something that can't be done by any one individual or any one organization: creating a new discipline is necessarily a community effort. The only reason that dbt has become as widespread as it has is because people like you choose to get involved and share your knowledge. Contributing to the community can also be a great way to learn new skills, build up a public portfolio, and make friends with other practitioners.
+
+There are opportunities here for everyone to get involved, whether you are just beginning your analytics engineering journey or you are a seasoned data professional. Contributing isn't about knowing all of the answers; it's about learning things together.
+
+Below you'll find a sampling of the ways to get involved. There are a lot of options, but these are ultimately just variations on the theme of sharing knowledge with the broader community.
+
+<!-- Card grid linking to the contribution guides below -->
+
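The FAQ sidebar specs earlier in this diff repeat one click-and-assert pair per menu item, varying only the category's `:nth-child` index and the page slug. If that duplication ever becomes a maintenance burden, the same coverage could plausibly be generated from a small data table instead. A minimal sketch under that assumption follows; `MENU` and `faqSections` are illustrative names that are not part of this commit, and only the Accounts category is filled in:

```js
// Sketch: generate the FAQ link checks from data rather than hand-written pairs.
// Assumes the same Docusaurus menu structure the specs above select against.
const MENU = '[style="display: block; overflow: visible; height: auto; will-change: height; transition: height 357ms ease-in-out 0s;"]'

// One entry per sidebar category: its :nth-child index and its page slugs, in order.
const faqSections = {
  Accounts: {
    index: 1,
    slugs: ['change-billing', 'configurable-snapshot-path', 'dbt-specific-jinja',
            'git-account-in-use', 'payment-accepted', 'slack'],
  },
  // ...the remaining categories would follow the same shape
}

Object.entries(faqSections).forEach(([category, { index, slugs }]) => {
  it(`verifies the frequently asked questions - ${category} page links`, () => {
    // Open the FAQ index, then the category's collapsible section.
    cy.get(':nth-child(6) > .menu__list-item-collapsible > .menu__link').click()
    cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/docs/faqs`, `${Cypress.config('baseUrl')}/docs/introduction`)
    cy.get(`${MENU} > :nth-child(${index}) > .menu__list-item-collapsible > .menu__link`).click()

    // Click each item and assert it resolves, exactly as the hand-written specs do.
    slugs.forEach((slug, i) => {
      cy.get(`${MENU} > :nth-child(${index}) > .menu__list > :nth-child(${i + 1}) > .menu__link`).click()
      cy.checkLinksNotBroken(`${Cypress.config('baseUrl')}/faqs/${category}/${slug}`, `${Cypress.config('baseUrl')}/docs/introduction`)
    })
  })
})
```

Either shape exercises the same selectors and the same `checkLinksNotBroken` assertions; the table simply centralizes the category indexes and slugs in one place.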
diff --git a/website/docs/community/contributing/contributing-coding.md b/website/docs/community/contributing/contributing-coding.md
new file mode 100644
index 00000000000..9157eb6b2ec
--- /dev/null
+++ b/website/docs/community/contributing/contributing-coding.md
@@ -0,0 +1,59 @@
+---
+title: "Coding contributions"
+id: "contributing-coding"
+---
+
+### Contribute to dbt Packages
+
+#### Overview
+
+[dbt Packages](https://docs.getdbt.com/docs/building-a-dbt-project/package-management) are the easiest way for analytics engineers to get involved with contributing code to the dbt Community, because dbt Packages are just standard [dbt Projects](https://docs.getdbt.com/docs/building-a-dbt-project/projects). If you can create a dbt Project, write a macro, and ref a model, you can make a dbt Package. Packages function much like libraries do in other programming languages. They allow for prewritten, modularized development of code to solve common problems in analytics engineering. You can view all dbt Packages on the [dbt Package Hub](https://hub.getdbt.com/).
+
+#### Contribution opportunities
+
+- Create a new package for the dbt Package Hub. This might be a new set of macros or tests that have been useful to you in your projects, a set of models for engaging with a commonly used data source, or anything else that can be done from within a dbt project.
+- Improve an existing package. You can do this by creating and engaging with issues, or by opening a PR that adds functionality to address an existing issue.
+
+#### Sample contributions
+
+- [dbt Expectations](https://hub.getdbt.com/calogica/dbt_expectations/latest/)
+- [dbt Artifacts](https://hub.getdbt.com/brooklyn-data/dbt_artifacts/latest/)
+
+#### Get started
+
+- Use packages in your own projects! The best way to know how to improve a package is to use it in a production environment, then look for ways it can be modified or improved.
+- Read the following resources on package development:
+  - [So You Want to Build a dbt Package](https://docs.getdbt.com/blog/so-you-want-to-build-a-package)
+  - [Package Best Practices](https://github.com/dbt-labs/hubcap/blob/main/package-best-practices.md)
+- Need help? Visit #package-ecosystem in the dbt Community Slack.
+
+### Contribute to dbt open source software
+
+#### Overview
+
+dbt Core, adapters, tooling, and the sites powering the Package Hub and Developer Hub are all vibrant open source projects. Unlike dbt Packages, contributing code to these projects typically requires some working knowledge of programming languages outside of SQL and Jinja, but the supportive community around these repositories can help you advance those skills. Even without contributing code, there are many ways to be part of open source development in these projects, detailed below. You can find a curated list of the most active OSS projects that dbt Labs supports [here](/community/resources/oss-projects).
+
+#### Contribution opportunities
+
+There are three primary ways to contribute to the dbt OSS projects. We'll use dbt Core as an example, as it's the most active and mature OSS project we support, and a great place to start for newcomers:
+
+- [Open an issue](https://github.com/dbt-labs/dbt-core/issues/new/choose) to suggest an improvement or give feedback.
+- Comment / engage on existing [issues](https://github.com/dbt-labs/dbt-core/issues) or [discussions](https://github.com/dbt-labs/dbt-core/discussions). This could be upvoting issues that would be helpful for your organization, commenting to add nuance to a feature request, or sharing how a feature would impact your dbt usage.
+- Create a pull request that resolves an open issue. This involves writing the code and tests that add the feature or resolve the bug described in an issue, and then going through the code review process asynchronously with a dbt Labs engineer.
+
+#### Sample contributions
+
+- Check out [this issue](https://github.com/dbt-labs/dbt-core/issues/3612) about improving error messages and [the PR that the community contributed to fix it](https://github.com/dbt-labs/dbt-core/pull/3703).
+- From the above issue, [another issue was generated](https://github.com/dbt-labs/dbt-bigquery/issues/202) to change not just the error message but improve the behavior. This is the virtuous cycle of open source community development! Bit by bit we, the community, craft the tool to better fit our needs.
+
+#### Get started
+
+- Read the dbt Core [contribution guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and the [Open Source Software Expectations](https://docs.getdbt.com/docs/contributing/oss-expectations).
+- If contributing to dbt Core, find an issue labeled "[good first issue](https://github.com/dbt-labs/dbt-core/issues?q=is%3Aopen+is%3Aissue+label%3Agood_first_issue)", or look for similar labels on other repositories. If in doubt, feel free to ask the maintainers for a good first issue; they'll be excited to welcome you!
+
+#### Need help?
+
+The following channels in the dbt Community Slack are a great place to ask questions:
+
+- #dbt-core-development
+- #adapter-ecosystem
diff --git a/website/docs/community/contributing/contributing-online-community.md b/website/docs/community/contributing/contributing-online-community.md
new file mode 100644
index 00000000000..400a4dadc32
--- /dev/null
+++ b/website/docs/community/contributing/contributing-online-community.md
@@ -0,0 +1,46 @@
+---
+title: "Online community building"
+id: "contributing-online-community"
+---
+
+### Maintaining a channel in the dbt Community Slack
+
+#### Overview
+
+The dbt Slack is the place for real-time conversations with the dbt Community. Slack channels exist for specific locations, tools, industries, and methodologies. To make sure that every channel has dedicated attention from a committed community member, we have Community Maintainers who oversee the discussion in particular channels.
+
+#### Contribution opportunities
+
+Every channel can benefit from people who are engaged and committed to making it a more interesting place to hang out! If there's a channel that you're interested in, spend time there.
+
+For new channels that you'd like to create and maintain, post a message in the #channel-requests channel.
+
+#### Sample contributions
+
+- Karen Hsieh's [contributions](https://getdbt.slack.com/archives/C02TU2DSKND/p1661483529756289) to the #local-taipei channel are a fantastic example to learn from.
+
+#### Get started
+
+- Read the guide to [Maintaining a Slack Channel](/community/resources/maintaining-a-channel)
+
+### Participating on the Community Forum
+
+#### Overview
+
+[The dbt Community Forum](https://discourse.getdbt.com) is the preferred platform for support questions as well as a space for long-lived discussions about dbt, analytics engineering, and the analytics profession. It's a place for us to build up a long-lasting knowledge base around the common challenges, opportunities, and patterns we work with every day.
+
+#### Contribution opportunities
+
+Participate in the Forum by asking and answering questions. These discussions are what allow us to find gaps in our best practices, documentation, and other recommendations, as well as to get folks onboarded and understanding dbt. Remember, it's a mitzvah to answer a question.
+
+If you see a great question or answer, be generous with your 💜 reactions. Click the Solved button when your question is answered, so others can benefit.
+
+#### Sample contributions
+
+- An analytics engineer wrote about [how they modified dbt to automatically put models into the correct schema](https://discourse.getdbt.com/t/extracting-schema-and-model-names-from-the-filename/575) based on their filename.
+- Here's [an example of the supportive, thorough answers](https://discourse.getdbt.com/t/is-it-possible-to-have-multiple-files-with-the-same-name-in-dbt/647) you can receive when you take the time to ask a question well.
+
+#### Get started
+
+- Read the [Community Forum Guidelines](/community/resources/forum-guidelines) to understand what topics are a good fit and why this space is important in building long-term community knowledge.
+- Head over to the "[Help](https://discourse.getdbt.com/c/help/19)" section of the forum and look for areas to hop in! You don't need to know the exact answer to a question to be able to provide a helpful pointer.
diff --git a/website/docs/community/contributing/contributing-realtime-events.md b/website/docs/community/contributing/contributing-realtime-events.md
new file mode 100644
index 00000000000..36d1c9e2339
--- /dev/null
+++ b/website/docs/community/contributing/contributing-realtime-events.md
@@ -0,0 +1,47 @@
+---
+title: "Realtime event participation"
+id: "contributing-realtime-events"
+---
+
+### Speak at a Meetup
+
+#### Overview
+
+Meetups are a place to engage and interact with your fellow dbt Community members (in person when possible, but sometimes online). We've got 21 Meetups in 16 countries.
+
+#### Contribution opportunities
+
+Give a talk! Meetups are all about sharing your knowledge with other analytics practitioners. Have you recently solved a problem in your data organization, published a package, or generally done something of interest to the dbt Community? Meet your local pals and share what you've done at a Meetup.
+
+#### Sample contributions
+
+- Take a look at [the slides](https://docs.google.com/presentation/d/1iqVjzxxRggMnRoI40ku88miDKw795djpKV_v4bbLpPE/edit#slide=id.g553a984de0_0_19) and [watch the video](https://www.youtube.com/watch?v=BF7HH8JDUS0) from Kenny Ning's 2020 Meetup talk on predicting customer conversions with dbt and ML for Better.com.
+- Dig into [the deck](https://docs.google.com/presentation/d/1wE8NSkFPLFKGQ8fvFUUKoZFVoUhws_FhFip-9mDhoPU/edit#slide=id.p) and [the video](https://www.youtube.com/watch?v=VhH614WVufM) from Bowery Farming's talk on migrating dbt models from Redshift to Snowflake.
+
+#### Get started
+
+- Read [How to Deliver a Fantastic Meetup Talk](/community/resources/speaking-at-a-meetup).
+- Find a [Meetup near you](https://www.meetup.com/pro/dbt/), start attending, and let the organizers know you are interested!
+
+### Speak at Coalesce
+
+#### Overview
+
+[Coalesce](https://coalesce.getdbt.com/) is the annual analytics engineering conference hosted by dbt Labs. While Meetups are focused on sharing knowledge with a specific local hub of the Community, Coalesce is the way to share ideas with everyone. Each year we gather together, take stock of what we've learned, and pool our best ideas about analytics.
While Meetups are focused on sharing knowledge with a specific local hub of the Community, Coalesce is the way to share ideas with everyone. Each year we gather together, take stock of what we’ve learned, and pool our best ideas about analytics.
+
+#### Contribution opportunities
+
+- Attend Coalesce:
+  - Coalesce is the once-a-year gathering for analytics engineers. Whether you choose to join online or at one of our in-person events, attending Coalesce is the best way to get an immersive experience of what the dbt Community is like.
+- Speak at Coalesce!
+  - We’d love to hear what you’ve been working on, thinking about, and dreaming up in the analytics engineering space. Coalesce talks can be forward-looking views on the industry, deep dives into particular technical solutions, or personal stories about your journey in data.
+
+#### Sample contributions
+
+- [Run Your Data Team as a Product Team](https://www.getdbt.com/coalesce-2020/run-your-data-team-as-a-product-team/)
+- [Tailoring dbt's incremental_strategy to Artsy's data needs](https://www.getdbt.com/coalesce-2021/tailoring-dbts-incremental-strategy-to-artsys-data-needs/)
+
+#### Get started
+
+- If registration is open, register on the [Coalesce website](https://coalesce.getdbt.com/).
+- Join #coalesce-updates on the dbt Community Slack.
diff --git a/website/docs/community/contributing/contributing-writing.md b/website/docs/community/contributing/contributing-writing.md
new file mode 100644
index 00000000000..86635e651c7
--- /dev/null
+++ b/website/docs/community/contributing/contributing-writing.md
@@ -0,0 +1,55 @@
+---
+title: "Writing contributions"
+id: "contributing-writing"
+---
+
+### Contribute to the product documentation
+
+#### Overview
+
+The [dbt Product Documentation](https://docs.getdbt.com/docs/introduction) sits at the heart of how people learn to use and engage with dbt. From explaining dbt to newcomers to providing references for advanced functionality and APIs, the product docs are a frequent resource for _every_ dbt Developer.
+
+#### Contribution opportunities
+
+We strive to create pathways that inspire you to learn more about dbt and enable you to continuously improve the way you solve data problems. We always appreciate the vigilance of the Community in helping us accurately represent the functionality and capabilities of dbt. You can participate by:
+
+- [Opening an issue](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) when you see something that can be fixed, whether it’s large or small.
+- Creating a PR when you see something you want to fix, or to address an existing issue. You can do this by clicking **Edit this page** at the bottom of any page on [docs.getdbt.com](http://docs.getdbt.com).
+
+#### Sample contributions
+
+We appreciate these contributions because they contain context in the original post (OP) that helps us understand their relevance. They also add value to the docs, even in small ways!
+
+- Larger contribution: https://github.com/dbt-labs/docs.getdbt.com/pull/1898
+- Smaller contribution: https://github.com/dbt-labs/docs.getdbt.com/pull/1114
+
+#### Get started
+
+- You can contribute to [docs.getdbt.com](http://docs.getdbt.com) by looking at our repository’s [README](https://github.com/dbt-labs/docs.getdbt.com#readme) or clicking **Edit this page** at the bottom of most pages at docs.getdbt.com.
+- Read the [OSS Expectations](https://docs.getdbt.com/docs/contributing/oss-expectations).
+
+- Find an issue labeled “[good first issue](https://github.com/dbt-labs/docs.getdbt.com/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22+).”
+- Need help? Visit #community-writers on the Community Slack or mention `@dbt-labs/product-docs` in a pull request or issue comment.
+
+### Write a Developer Blog Post
+
+#### Overview
+
+The [dbt Developer Blog](https://docs.getdbt.com/blog) is the place for analytics practitioners to talk about _what it’s like to do data work right now._ This is the place to share tips and tricks, hard-won knowledge, and stories from the trenches with the dbt Community.
+
+#### Contribution opportunities
+
+We want to hear your stories! Did you recently solve a cool problem, discover an interesting bug, or lead an organizational change? Come tell the story on the dbt Developer Blog.
+
+#### Sample contributions
+
+- [Founding an Analytics Engineering Team From Scratch](https://docs.getdbt.com/blog/founding-an-analytics-engineering-team-smartsheet#our-own-take-on-data-mesh)
+- [Tackling the Complexity of Joining Snapshots](https://docs.getdbt.com/blog/joining-snapshot-complexity)
+
+#### Get started
+
+- [Read the contribution guide](https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/developer-blog.md)
+- [Open up an issue with your idea for a post](https://github.com/dbt-labs/docs.getdbt.com/issues/new?assignees=&labels=content%2Cdeveloper+blog&template=contribute-to-developer-blog.yml)
+
+#### Need help?
+
+Visit #community-writers in the dbt Community Slack.
diff --git a/website/docs/community/join.md b/website/docs/community/join.md
new file mode 100644
index 00000000000..254a1a8547c
--- /dev/null
+++ b/website/docs/community/join.md
@@ -0,0 +1,53 @@
+---
+title: Join the Community
+id: join
+---
+
+ +Want to learn how organizations around the world are tackling the biggest challenges in data while making new friends from the best analytics teams? Join the dbt Community — data practitioners’ favorite place to learn new skills, keep on top of industry trends, and forge connections. + +
diff --git a/website/docs/community/resources/code-of-conduct.md b/website/docs/community/resources/code-of-conduct.md
new file mode 100644
index 00000000000..6788f3ae39f
--- /dev/null
+++ b/website/docs/community/resources/code-of-conduct.md
@@ -0,0 +1,167 @@
+---
+title: "Code of Conduct"
+id: "code-of-conduct"
+---
+
+# dbt Community Code of Conduct
+
+dbt has a supportive, active community of thousands of smart, kind, and helpful people who share a commitment to elevating the analytics profession.
+
+You can get involved in the dbt community by connecting at [events](/community/events), getting or giving help in any of our many channels, contributing to dbt or a dbt package, and in many other ways.
+
+People genuinely love this community, and we are committed to maintaining the spirit of it. As such, we have written this Code of Conduct to help all participants understand how to best participate in our community.
+
+The Code of Conduct applies to all dbt Community spaces both online and off. This includes: Slack, Discourse, code repositories (dbt Core, dbt packages, etc.), Office Hours, and Meetups. There are some guidelines specific to particular forums (listed below). Participants are responsible for knowing and abiding by this Code of Conduct.
+
+This Code of Conduct has three sections:
+
+- **dbt Community Values:** These values apply to all of our community spaces, and all of our guidelines are based on these values.
+- **Forum-specific guidelines**: These guidelines explain some of the cultural norms that apply to specific forums.
+- **Anti-harassment policy:** We are dedicated to providing a harassment-free experience for everyone in our community — here, we outline exactly what that means.
+
+We appreciate your support in continuing to build a community we’re all proud of.
+
+— The dbt Community Admin Team.
+
+## dbt Community Values
+
+### Be respectful.
+
+We want everyone to have a fulfilling and positive experience in the dbt Community, and we are continually grateful for your help in ensuring that this is the case.
+
+Be courteous, respectful, and polite to fellow community members. Generally, don’t be a jerk.
+
+Be considerate of others’ time — many people in the community generously give their time for free.
+
+Ways to demonstrate this value:
+
+- Take the time to write bug reports well ([example](https://github.com/fishtown-analytics/dbt/issues/2370)).
+- Thank people if they help solve a problem.
+
+### Be you.
+
+Some developer communities allow and even encourage anonymity — we prefer it when people identify themselves clearly. It helps to build empathy and form relationships.
+
+Ways to demonstrate this value:
+
+- Update your profile on any dbt Community forums to include your name and a clear picture. On Slack, use the “what I do” section to add your role title and current company.
+- Write in your own voice, and offer your own advice, rather than speaking in your company’s marketing or support voice.
+
+### Encourage diversity and participation.
+
+People with different mindsets and experiences, working together, create better outcomes. This includes diversity of race and gender, as well as diversity of academic and career backgrounds, socio-economic backgrounds, geographic backgrounds, ideologies, and interests.
+
+Ways to demonstrate this value:
+
+- Make everyone in our community feel welcome, regardless of their background, and do everything possible to encourage participation in our community.
+- Demonstrate empathy for a community member’s experience — not everyone comes from the same career background, so adjust answers accordingly.
+- If you are sourcing speakers for events, put in additional effort to find speakers from underrepresented groups.
+
+### Create more value than you capture.
+
+Each community member should strive to create more value in the community than they capture. This is foundational to being a community.
+
+Ways to demonstrate this value:
+
+- Contribute to dbt or a dbt package
+- Participate in discussions on Slack and Discourse
+- Share things you have learned on Discourse
+- Host events
+
+Be mindful that others may not want their image or name on social media, and when attending or hosting an in-person event, ask permission prior to posting about another person.
+
+### Be curious.
+
+Always ask yourself “why?” and strive to be continually learning.
+
+Ways to demonstrate this value:
+
+- Try solving a problem yourself before asking for help, e.g. rather than asking “what happens when I do X”, experiment and observe the results!
+- When asking questions, explain the “why” behind your decisions, e.g. “I’m trying to solve X problem by writing Y code. I’m getting Z problem.”
+- When helping someone else, explain why you chose that solution, or if no solution exists, elaborate on the reason for that, e.g. “That’s not possible in dbt today — but here’s a workaround / check out this GitHub issue for a relevant discussion”
+
+## Guidelines
+
+### Participating in Slack
+
+dbt Slack is where the dbt community hangs out, discusses issues, and troubleshoots problems together. It is not a support service — please do not treat it like one.
+
+We also have a number of cultural norms in our Slack community. You must read and agree to the rules before joining Slack, but you can also find them [here](/community/resources/slack-rules-of-the-road/).
+
+As a short summary:
+
+- [Rule 1: Be respectful](/community/resources/slack-rules-of-the-road/#rule-1-be-respectful)
+- [Rule 2: Use the right channel](/community/resources/slack-rules-of-the-road/#rule-2-use-the-right-channel)
+- [Rule 3: Put effort into your question](/community/resources/slack-rules-of-the-road/#rule-3-put-effort-into-your-question)
+- [Rule 4: Do not double-post](/community/resources/slack-rules-of-the-road/#rule-4-do-not-double-post)
+- [Rule 5: Keep it in public channels](/community/resources/slack-rules-of-the-road/#rule-5-keep-it-in-public-channels)
+- [Rule 6: Do not solicit members of our Slack](/community/resources/slack-rules-of-the-road/#rule-6-do-not-solicit-members-of-our-slack)
+- [Rule 7: Do not demand attention with @channel and @here, or by tagging individuals](/community/resources/slack-rules-of-the-road/#rule-7-do-not-demand-attention-with-channel-and-here-or-by-tagging-individuals)
+- [Rule 8: Use threads](/community/resources/slack-rules-of-the-road/#rule-8-use-threads)
+
+### Vendor guidelines
+
+If you are a vendor (i.e. you represent an organization that sells a product or service relevant to our community), then there are additional guidelines you should be aware of.
+
+Most importantly — do not solicit members of our community as lead generation. You can find the rest of these [here](/community/resources/vendor-guidelines).
+
+### Guideline violations — 3 strikes method
+
+The point of our guidelines is not to find opportunities to punish people, but we do need a fair way to deal with people who do harm to our community.
Violations related to our anti-harassment policy (below) will be addressed immediately and are not subject to 3 strikes.
+
+1. First occurrence: We’ll give you a friendly, but public, reminder that the behavior is inappropriate according to our guidelines.
+2. Second occurrence: We’ll send you a private message with a warning that any additional violations will result in removal from the community.
+3. Third occurrence: Depending on the violation, we might need to delete or ban your account.
+
+Notes:
+
+- Obvious spammers are banned on first occurrence.
+- Participation in the dbt Community is a privilege — we reserve the right to remove people from the community.
+- Violations are forgiven after 6 months of good behavior, and we won’t hold a grudge.
+- People who commit minor formatting or style infractions will get some education, rather than being put through the 3 strikes process.
+- Contact conduct@getdbt.com to report abuse or appeal violations. In the case of appeals, we know that mistakes happen, and we’ll work with you to come up with a fair solution if there has been a misunderstanding.
+
+## Anti-harassment policy
+
+Further to our guidelines for participating in the community in a positive manner, we are also dedicated to providing a harassment-free experience for everyone. We do not tolerate harassment of participants in any form.
+
+Harassment includes:
+
+- Offensive comments related to gender, gender identity and expression, sexual orientation, disability, mental illness, neuro(a)typicality, physical appearance, body size, age, race, or religion.
+- Unwelcome comments regarding a person’s lifestyle choices and practices, including those related to food, health, parenting, drugs, and employment.
+- Deliberate misgendering or use of ‘dead’ or rejected names.
+- Gratuitous or off-topic sexual images or behavior in spaces where they’re not appropriate.
+- Physical contact and simulated physical contact (e.g., textual descriptions like “*hug*” or “*backrub*”) without consent or after a request to stop.
+- Threats of violence.
+- Incitement of violence towards any individual, including encouraging a person to commit suicide or to engage in self-harm.
+- Deliberate intimidation.
+- Stalking or following.
+- Harassing photography or recording, including logging online activity for harassment purposes.
+- Sustained disruption of discussion.
+- Unwelcome sexual attention.
+- A pattern of inappropriate social contact, such as requesting/assuming inappropriate levels of intimacy with others.
+- Continued one-on-one communication after requests to cease.
+- Deliberate “outing” of any aspect of a person’s identity without their consent except as necessary to protect vulnerable people from intentional abuse.
+- Publication of non-harassing private communication.
+
+The dbt Community prioritizes marginalized people’s safety over privileged people’s comfort.
The dbt Community Admin team reserves the right not to act on complaints regarding:
+
+- ‘Reverse’-isms, including ‘reverse racism,’ ‘reverse sexism,’ and ‘cisphobia’
+- Reasonable communication of boundaries, such as “leave me alone,” “go away,” or “I’m not discussing this with you.”
+- Communicating in a ‘tone’ you don’t find congenial
+- Criticizing racist, sexist, cissexist, or otherwise oppressive behavior or assumptions
+
+### Reporting harassment
+
+If you are being harassed by a member of the dbt Community, notice that someone else is being harassed, or have any other concerns, please contact us at [community@dbtlabs.com](mailto:community@dbtlabs.com).
+
+We will respect confidentiality requests for the purpose of protecting victims of abuse. At our discretion, we may publicly name a person about whom we’ve received harassment complaints, or privately warn third parties about them, if we believe that doing so will increase the safety of dbt community members or the general public. We will not name harassment victims without their affirmative consent.
+
+### Consequences
+
+Participants asked to stop any harassing behavior are expected to comply immediately.
+
+If a participant engages in harassing behavior, the dbt Community Admin team may take any action they deem appropriate, up to and including expulsion from all dbt Community spaces and identification of the participant as a harasser to other dbt Community members or the general public.
+
+## Credits
+
+Credit to [01.org](https://01.org/community/participation-guidelines), [Tizen.org](https://www.tizen.org/community/guidelines), and [Geek Feminism](https://geekfeminism.wikia.org/wiki/Community_anti-harassment/Policy) for some of the wording used in this Code of Conduct.
diff --git a/website/docs/docs/contributing/contributor-license-agreements.md b/website/docs/community/resources/contributor-license-agreements.md
similarity index 100%
rename from website/docs/docs/contributing/contributor-license-agreements.md
rename to website/docs/community/resources/contributor-license-agreements.md
diff --git a/website/docs/community/resources/forum-guidelines.md b/website/docs/community/resources/forum-guidelines.md
new file mode 100644
index 00000000000..9c86c703663
--- /dev/null
+++ b/website/docs/community/resources/forum-guidelines.md
@@ -0,0 +1,52 @@
+---
+title: "Community Forum guidelines"
+id: "forum-guidelines"
+---
+
+## What is the dbt Community Forum?
+
+[The dbt Community Forum](https://discourse.getdbt.com) is the platform for getting support with dbt, as well as a space for long-lived discussions about dbt, analytics engineering, and analytics. It's a place for us to build up a long-lasting knowledge base around the common challenges, opportunities, and patterns we work with every day. The forum is different from the dbt Community Slack in a few key ways; most importantly, it is:
+
+- **Asynchronous** and **long-lived** - sometimes conversations continue over weeks, months, and beyond.
+- **Intentional** - we recommend taking at least 5 to 10 minutes thinking about and shaping your initial post and any comments.
+- **Citable** - Slack conversations tend to be great in the moment but get lost in the flow — forum posts can be more easily shared and referenced.
+
+## Guidelines for engaging on the Forum
+
+The community [Rules of the Road](/community/resources/slack-rules-of-the-road) apply, and following them is the best way to get momentum behind your discussion or answers to your questions.
The following guidelines will set you up for success:
+
+- Be respectful
+- Put effort into your posts
+- Mark replies as Solutions in the Help section
+- Don’t double post
+
+## Categories
+
+The forum is broken down into three categories:
+
+- [Help](https://discourse.getdbt.com/c/help/19)
+  - This is a Q&A style forum where you can ask the dbt Community for help with specific questions about dbt, dbt Cloud, data modeling, or anything else you want a definitive answer on.
+  - This category is for questions which can plausibly have a _single correct answer_.
+    - ✅ How do I debug this Jinja error?
+    - ✅ How do I set up CI in dbt Cloud on GitHub?
+    - ❌ What is the best way to do marketing attribution? (More general Discussions like this are perfect for the [In-Depth Discussions](https://discourse.getdbt.com/c/discussions/21) category)
+- [Show and Tell](https://discourse.getdbt.com/c/show-and-tell/22)
+  - This is the place to show off all of the cool things you are doing in dbt. Whether it’s a new macro, design pattern, or package, post here to show the community what you are up to!
+- [In-Depth Discussions](https://discourse.getdbt.com/c/discussions/21)
+  - Share anything you’re thinking about that has to do with dbt or analytics engineering! This is a great place to jot down some thoughts to share with the community or spark a discussion on a topic that’s currently interesting you.
+
+## Inclusivity on the Community Forum
+
+We are **strongly** committed to building a community where everyone can feel welcome.
+
+The dbt community started with people who were not traditionally considered “technical”, did not have ownership over technical systems, and were often left out of organizational decision-making. We came together to learn from each other, solve hard problems together, and help build a new discipline where data folks have greater ownership over our own work. It really matters to us that everyone feels like they can ask questions and engage, no matter their professional or personal background.
+
+Technical forums have the potential to replicate harmful power structures, and can feel intimidating or hostile. We are working hard to create and sustain an inclusive environment through community-building, technological solutions, inclusive content, and diverse contributors. This is a long-term project, and we will continue to iterate and make improvements.
+
+If you have any ideas or feedback on how to make this space friendlier or more inclusive, please let us know, either on the community Slack in the #community-strategy channel or via email at [community@dbtlabs.com](mailto:community@dbtlabs.com). We want to hear from you!
+
+## Following new and ongoing Discussions
+
+The best way to stay up to date is to [browse the forum](https://discourse.getdbt.com/) directly. You can also Track or Watch specific threads or the whole category to receive updates on them without commenting.
+
+Each category also has a companion Slack channel (#advice-dbt-for-beginners, #show-and-tell, and #in-depth-discussions). You can reply to the initial post in Slack and it will be added as a comment on the forum thread, allowing you to participate from inside Slack if you prefer.
diff --git a/website/docs/community/maintaining-a-channel.md b/website/docs/community/resources/maintaining-a-channel.md
similarity index 84%
rename from website/docs/community/maintaining-a-channel.md
rename to website/docs/community/resources/maintaining-a-channel.md
index 856082e238d..1961686d4a2 100644
--- a/website/docs/community/maintaining-a-channel.md
+++ b/website/docs/community/resources/maintaining-a-channel.md
@@ -11,27 +11,27 @@ There are three things you should do to be a good channel maintainer in the [dbt
- Keep an eye out in #introductions for folks who might benefit from your new channel. For example if someone works in the space or on the same problems, then direct them to the channel.
- Make sure folks follow the [Rules of the Road](https://docs.getdbt.com/docs/contributing/slack-rules-of-the-road) in the channel. If you notice someone is not following one, gently remind them of the rule in thread, and, ideally, provide an example of how they can rephrase their message or where they can redirect it. If you have a question about how to proceed, just post it in #ask-a-moderator with a screenshot or link to the thread and someone will give you advice.

-## Scope of the Role
+## Scope of the role

-A maintainer can be a dbt Labs employee, but does not have to be. *Slack channel maintainer* is philosophically similar to OSS maintainer. At the onset, the channel maintainer will help build up this new space in Slack and stir up conversation during the first few weeks of the channel's existence. They are someone who stays on top of feedback and encourages generative contributions. This is not necessarily someone who is the generator of content and contributions, or answers every question.
+A maintainer can be a dbt Labs employee, but does not have to be. *Slack channel maintainer* is philosophically similar to OSS maintainer. At the onset, the channel maintainer will help build up this new space in Slack and stir up conversation during the first few weeks of the channel's existence. They are someone who stays on top of feedback and encourages generative contributions. This is not necessarily someone who is the generator of content and contributions, or answers every question.

-## Initial Instructions
+## Initial instructions

-1. Review the [Rules of the Road](https://docs.getdbt.com/docs/contributing/slack-rules-of-the-road) and [Code of Conduct](https://www.getdbt.com/community/code-of-conduct) and please let the the folks who created the channel know that you read both documents and you agree to be mindful of them.
+1. Review the [Rules of the Road](/community/resources/slack-rules-of-the-road) and [Code of Conduct](/community/resources/code-of-conduct) and please let the folks who created the channel know that you read both documents and you agree to be mindful of them.
2. If you are a vendor, review the [Vendor Guidelines](https://www.getdbt.com/community/vendor-guidelines).
3. Add the Topic and Description to the channel. @Mention your name in the channel Description, identifying yourself as the maintainer. Ex: *Maintainer: First Last (pronouns).* If you are a vendor, make sure your Handle contains your affiliation.
4. Complete or update your Slack profile by making sure your Company (in the ‘What I do’ field), Pronouns, and Handle, if you’re a vendor, are up-to-date.
-5. Post initial conversation topics once a few folks get in the channel to help folks get to know each other.
Check out this [example introductory post](https://getdbt.slack.com/archives/C02FXAZRRDW/p1632407767005000).
-6. Stir up conversation during the first few weeks of the channel's existence. As you get started, answer the questions you can or help find someone with answers, seed discussions about once a week, and making sure folks follow the Rules of the Road.
+5. Post initial conversation topics once a few folks get in the channel to help folks get to know each other. Check out this [example introductory post](https://getdbt.slack.com/archives/C02FXAZRRDW/p1632407767005000).
+6. Stir up conversation during the first few weeks of the channel's existence. As you get started, answer the questions you can or help find someone with answers, seed discussions about once a week, and make sure folks follow the Rules of the Road.

-## Long-term Expectations
+## Long-term expectations

- Maintaining the channel, checking in and being active on a regular basis by answering folks' questions, and seeding discussions. Want an example? Check out [this poll](https://getdbt.slack.com/archives/C022A67TLFL/p1628279819038800).
- For guidance on how to answer a question, see [Answering Community Questions](https://www.getdbt.com/community/answering-community-questions). If you are not sure how to answer a lingering or unanswered question, you can post about it in #ask-a-moderator or direct it to another channel, if relevant.
- If the channel is an industry channel, it’s helpful to monitor [#introductions](https://getdbt.slack.com/archives/CETJLH1V3) and invite people. Keep an eye out for folks who might benefit from being in the new channel if they mention they are working in the space, or are thinking about some of these problems.
- Make sure folks follow the [Rules of the Road](https://docs.getdbt.com/docs/contributing/slack-rules-of-the-road). For example, if you notice someone is not following one, gently remind them of the rule in thread, and, ideally, provide an example of how they can rephrase their message or where they can redirect it. If you have a question about how to proceed, just post about it in #ask-a-moderator with a link to the thread or screenshot and someone will give you advice.
- In tools channels, sharing customer stories and product updates is very okay in this channel because folks expect that when they join. However, please avoid any direct sales campaigns, pricing offers, etc.
-- If you have any questions/doubts about the [Rules of the Road](https://docs.getdbt.com/docs/contributing/slack-rules-of-the-road) or [Vendor Guidelines](https://www.getdbt.com/community/vendor-guidelines), please post a question in #ask-a-moderator about what sort of things the community expects from interactions with vendors.
+- If you have any questions/doubts about the [Rules of the Road](/community/resources/slack-rules-of-the-road) or [Vendor Guidelines](/community/resources/vendor-guidelines), please post a question in #ask-a-moderator about what sort of things the community expects from interactions with vendors.
- A reminder that we never DM anyone in Slack without their permission in public channel or some prior relationship.
- A reminder that @ here/all/channel are disabled.
- Use and encourage the use of threads 🧵 to keep conversations tidy!
diff --git a/website/docs/community/resources/organizing-inclusive-events.md b/website/docs/community/resources/organizing-inclusive-events.md
new file mode 100644
index 00000000000..00a6c54d12b
--- /dev/null
+++ b/website/docs/community/resources/organizing-inclusive-events.md
@@ -0,0 +1,57 @@
+---
+title: "Organizing inclusive events"
+id: "organizing-inclusive-events"
+---
+
+# Tips for organizing inclusive events
+
+The dbt community is filled with dedicated community leaders who create opportunities for connection, learning, and professional development within the analytics community.
+
+This guide is a resource to help organizers execute **inclusive digital events**. We understand that organizers, presenters, speakers, etc. might not be able to apply these tips to *every* event, but this guide will offer some food for thought.
+
+Additionally, this list can grow. If you would like to contribute a tip, please email [community@dbtlabs.com](mailto:community@dbtlabs.com).
+
+## General logistics
+
+- Try to choose a date that does not overlap with [holidays](http://www.holidayscalendar.com/months/) or general major events. Don’t forget to check international holidays (if applicable).
+- Avoid really large national/local events (e.g. the World Cup).
+
+## Marketing
+
+- If you are using photos, share images that include community members with a wide range of presentations, including people from underrepresented groups.
+- Put event accessibility information on your event page (e.g. “closed captioning available for all video resources”).
+- In the registration process, provide an opportunity for attendees to:
+  - share pronouns
+  - ask questions in advance
+  - request specific needs or other accommodations (interpreting services, braille transcription, dietary restrictions, etc.)
+- If this is a paid event (e.g. a conference), create a scholarship for attendees who might need financial support.
+- Think about how you are promoting your event — are you reaching underrepresented communities, marginalized populations, and people who might not have access to the internet?
+
+## Programming
+
+- Book diverse speakers. Include speakers that represent underrepresented and marginalized populations.
+- Do research on your speakers. Is there any reason that your speakers would make the audience uncomfortable?
+- Design an [accessible presentation](https://www.smashingmagazine.com/2018/11/inclusive-design-accessible-presentations/).
+- If possible, share a recording after the event for community members who are not able to make it, and add closed captioning.
+- Ask speakers to introduce themselves before starting their presentation, so that transcription services can capture who is talking.
+
+## Digital platforms for online events
+
+- Take a minute or two at the beginning of the event to explain the features of the platform that attendees will be using.
+- Offer the option for attendees to dial in by phone and participate without a computer or internet.
+- Explore the accessibility features your platform offers and apply them where necessary (e.g. closed captioning, automatic transcripts, screen reader support, etc.)
+- Check if your platform is compatible with assistive technology.
+
+## Attendee communication
+
+- Make sure that attendees have any addresses, links, codes, or numbers needed to access the event beforehand.
+- Share the agenda of the event beforehand so that attendees are able to make arrangements (if necessary).
+- Share contact information with attendees so that they are able to reach out with questions before and after the event.
+- Ask attendees for feedback in a post-event survey so that you are able to improve future experiences.
+
+## Speaker communication
+
+- Ask speakers how to pronounce their names before the event.
+- Ask speakers for their pronouns before the event.
+- Suggest that speakers use headphones to ensure clear audio.
+- Ask speakers to use plain language and avoid jargon, slang, idioms, etc.
diff --git a/website/docs/docs/contributing/oss-expectations.md b/website/docs/community/resources/oss-expectations.md
similarity index 71%
rename from website/docs/docs/contributing/oss-expectations.md
rename to website/docs/community/resources/oss-expectations.md
index 0dfa5e247df..66880bc63b7 100644
--- a/website/docs/docs/contributing/oss-expectations.md
+++ b/website/docs/community/resources/oss-expectations.md
@@ -1,16 +1,16 @@
---
-title: "Expectations for OSS Contributors"
+title: "Expectations for OSS contributors"
---

Whether it's a dbt package, a plugin, `dbt-core`, or this very documentation site, contributing to the open source code that supports the dbt ecosystem is a great way to level yourself up as a developer, and to give back to the community.

The goal of this page is to help you understand what to expect when contributing to dbt open source software (OSS). While we can only speak for our own experience as open source maintainers, many of these guidelines apply when contributing to other open source projects, too.

-Have you seen things in other OSS projects that you quite like, and think we could learn from? [Open a discussion on docs.dbt.com](https://github.com/dbt-labs/docs.getdbt.com/discussions/new), or start a conversation in the dbt Community Slack (for example: `#community-strategy`, `#dbt-core-development`, `#package-ecosystem`, `#adapter-ecosystem`). We always appreciate hearing from you!
+Have you seen things in other OSS projects that you quite like, and think we could learn from? [Open a discussion on the Developer Hub](https://github.com/dbt-labs/docs.getdbt.com/discussions/new), or start a conversation in the dbt Community Slack (for example: `#community-strategy`, `#dbt-core-development`, `#package-ecosystem`, `#adapter-ecosystem`). We always appreciate hearing from you!

## Principles

### Open source is participatory

-Why take time out of your day to write code you don’t *have* to? We all build dbt together. By using dbt, you’re invested in the future of the tool, and an agent in pushing forward the practice of analytics engineering. You’ve already benefited from using code contributed by community members, and documentation written by community members. Contributing to dbt OSS is your way to pay it forward, as an active participant in the thing we’re all creating together.
+Why take time out of your day to write code you don’t _have_ to? We all build dbt together. By using dbt, you’re invested in the future of the tool, and an agent in pushing forward the practice of analytics engineering. You’ve already benefited from using code contributed by community members, and documentation written by community members.
Contributing to dbt OSS is your way to pay it forward, as an active participant in the thing we’re all creating together. There’s a very practical reason, too: OSS prioritizes our collective knowledge and experience over any one person’s. We don’t have experience using every database, operating system, security environment, ... We rely on the community of OSS users to hone our product capabilities and documentation to the wide variety of contexts in which it operates. In this way, dbt gets to be the handiwork of thousands, rather than a few dozen. @@ -41,7 +41,7 @@ Discussions are a relatively new GitHub feature, and we really like them! A discussion is best suited to propose a Big Idea, such as brand-new capability in dbt Core, or a new section of the product docs. Anyone can open a discussion, add a comment to an existing one, or reply in a thread. -What to expect from a new Discussion? Hopefully, comments from other members of the community, who like your idea or have their own ideas for how it could be improved. The most helpful comments are ones that describe the kinds of experiences users and readers should have. Unlike an **issue**, there is no specific code change that would “resolve” a Discussion. +What can you expect from a new Discussion? Hopefully, comments from other members of the community, who like your idea or have their own ideas for how it could be improved. The most helpful comments are ones that describe the kinds of experiences users and readers should have. Unlike an **issue**, there is no specific code change that would “resolve” a Discussion. If, over the course of a discussion, we do manage to reach consensus on a way forward, we’ll open a new issue that references the discussion for context. That issue will connect desired outcomes to specific implementation details, as well as perceived limitations and open questions. It will serve as a formal proposal and request for comment. @@ -51,7 +51,7 @@ An issue could be a bug you’ve identified while using the product or reading t ### Best practices for issues -- Issues are **not** for support / troubleshooting / debugging help. Please [open a discussion on docs.getdbt.com](https://github.com/dbt-labs/docs.getdbt.com/discussions/new), so other future users can find and read proposed solutions. If you need help formulating your question, you can post in the `#advice-dbt-for-beginners` channel in the [dbt Community Slack](https://www.getdbt.com/community/). +- Issues are **not** for support / troubleshooting / debugging help. Please [open a discussion on the Developer Hub](https://github.com/dbt-labs/docs.getdbt.com/discussions/new), so other future users can find and read proposed solutions. If you need help formulating your question, you can post in the `#advice-dbt-for-beginners` channel in the [dbt Community Slack](https://www.getdbt.com/community/). - Always search existing issues first, to see if someone else had the same idea / found the same bug you did. - Many repositories offer templates for creating issues, such as when reporting a bug or requesting a new feature. If available, please select the relevant template and fill it out to the best of your ability. This will help other people understand your issue and respond. @@ -77,16 +77,16 @@ In all likelihood, the maintainer who responds will also add a number of labels. In some cases, the right resolution to an open issue might be tangential to the codebase. 
The right path forward might be in another codebase (we'll transfer it), a documentation update, or a change that can be made in user-space code. In other cases, the issue might describe functionality that the maintainers are unwilling or unable to incorporate into the main codebase. In these cases, a maintainer will close the issue (perhaps using a `wontfix` label) and explain why. -| tag | description | -| --- | ----------- | -| `triage` | This is a new issue which has not yet been reviewed by a maintainer. This label is removed when a maintainer reviews and responds to the issue. | -| `bug` | This issue represents a defect or regression from the behavior that's documented, or that you reasonably expect | -| `enhancement` | This issue represents net-new functionality, including an extension of an existing capability | -| `good first issue` | This issue does not require deep knowledge of the codebase to implement. This issue is appropriate for a first-time contributor. | -| `help wanted` | This issue is trickier than a "good first issue." The required changes are scattered across the codebase, or more difficult to test. The maintainers are happy to help an experienced community contributor; they aren't planning to prioritize this issue themselves. | -| `duplicate` | This issue is functionally identical to another open issue. The maintainers will close this issue and encourage community members to focus conversation on the other one. | -| `stale` | This is an old issue which has not recently been updated. In repositories with a lot of activity, stale issues will periodically be closed. | -| `wontfix` | This issue does not require a code change in the repository, or the maintainers are unwilling to merge a change which implements the proposed behavior. | +| tag | description | +| ------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `triage` | This is a new issue which has not yet been reviewed by a maintainer. This label is removed when a maintainer reviews and responds to the issue. | +| `bug` | This issue represents a defect or regression from the behavior that's documented, or that you reasonably expect | +| `enhancement` | This issue represents net-new functionality, including an extension of an existing capability | +| `good first issue` | This issue does not require deep knowledge of the codebase to implement. This issue is appropriate for a first-time contributor. | +| `help wanted` | This issue is trickier than a "good first issue." The required changes are scattered across the codebase, or more difficult to test. The maintainers are happy to help an experienced community contributor; they aren't planning to prioritize this issue themselves. | +| `duplicate` | This issue is functionally identical to another open issue. The maintainers will close this issue and encourage community members to focus conversation on the other one. | +| `stale` | This is an old issue which has not recently been updated. In repositories with a lot of activity, stale issues will periodically be closed. | +| `wontfix` | This issue does not require a code change in the repository, or the maintainers are unwilling to merge a change which implements the proposed behavior. 
|

## Pull requests

@@ -97,8 +97,9 @@ PRs are your surest way to make the change you want to see in dbt / packages / d
**Our goal is to review most new PRs within 7 days.** The first review will include some high-level comments about the implementation, including (at a high level) whether it’s something we think is suitable to merge. Depending on the scope of the PR, the first review may include line-level code suggestions, or we may delay specific code review until the PR is more finalized / until we have more time.

**Automation that can help us:** Many repositories have a template for pull request descriptions, which will include a checklist that must be completed before the PR can be merged. You don’t have to do all of these things to get an initial PR, but they definitely help. Those may include things like:
+
- **Tests!** When you open a PR, some tests and code checks will run. (For security reasons, some may need to be approved by a maintainer.) We will not merge any PRs with failing tests. If you’re not sure why a test is failing, please say so, and we’ll do our best to get to the bottom of it together.
-- **Contributor License Agreement** (CLA): This ensures that we can merge your code, without worrying about unexpected implications for the copyright or license of open source dbt software. For more details, read: ["Contributor License Agreements"](contributor-license-agreements)
+- **Contributor License Agreement** (CLA): This ensures that we can merge your code, without worrying about unexpected implications for the copyright or license of open source dbt software. For more details, read: ["Contributor License Agreements"](../resources/contributor-license-agreements.md)
- **Changelog:** In projects that include a number of changes in each release, we need a reliable way to signal what's been included. The mechanism for this will vary by repository, so keep an eye out for notes about how to update the changelog.

**If my PR is lingering...?** This happens, despite our best efforts—we promise it’s not intentional! If it’s been more than two weeks, we’d encourage you to leave a comment tagging `@dbt-labs/oss-maintainers`, reminding us that you’re awaiting review. For best results, say what in particular you’d like feedback on, and explain what it would mean to you to have the change merged in.
diff --git a/website/docs/community/resources/oss-projects.md b/website/docs/community/resources/oss-projects.md
new file mode 100644
index 00000000000..50dddde2f15
--- /dev/null
+++ b/website/docs/community/resources/oss-projects.md
@@ -0,0 +1,25 @@
+# Open source projects
+
+Looking for a good place to get involved contributing code?
dbt Labs supports the following OSS repos, organized by the language primarily needed for contribution:
+
+## Python
+
+- [dbt-core](https://github.com/dbt-labs/dbt-core/discussions) - the primary shared functionality powering dbt
+- [hubcap](https://github.com/dbt-labs/hubcap) - the code powering the dbt Package hub
+- adapters - [the warehouse-specific code that wires core up to various platforms](https://docs.getdbt.com/docs/contributing/adapter-development/1-what-are-adapters); dbt Labs supports development of several major platforms' adapters:
+  - [dbt-bigquery](https://github.com/dbt-labs/dbt-bigquery)
+  - [dbt-snowflake](https://github.com/dbt-labs/dbt-snowflake)
+  - [dbt-redshift](https://github.com/dbt-labs/dbt-redshift)
+  - [dbt-spark](https://github.com/dbt-labs/dbt-spark)
+
+## dbt
+
+- [dbt Labs' packages](https://hub.getdbt.com/dbt-labs/) - the dbt packages created and supported by dbt Labs. Packages are just dbt projects, so if you know the SQL, Jinja, and YAML necessary to work in dbt, you can contribute to packages.
+
+## YAML and JSON config
+
+- [dbt-jsonschema](https://github.com/dbt-labs/dbt-jsonschema) - powering completion and linting for YAML configuration in dbt projects.
+
+## Shell
+
+- [dbt-completion.bash](https://github.com/dbt-labs/dbt-completion.bash) - provides shell completion of CLI commands and selectors such as models and tests for bash and zsh.
diff --git a/website/docs/docs/contributing/slack-rules-of-the-road.md b/website/docs/community/resources/slack-rules-of-the-road.md
similarity index 90%
rename from website/docs/docs/contributing/slack-rules-of-the-road.md
rename to website/docs/community/resources/slack-rules-of-the-road.md
index bac70dd674d..27774f9e35e 100644
--- a/website/docs/docs/contributing/slack-rules-of-the-road.md
+++ b/website/docs/community/resources/slack-rules-of-the-road.md
@@ -3,7 +3,7 @@ title: "dbt Slack: Rules of the Road"
id: "slack-rules-of-the-road"
---

-As of February 2022, the dbt Slack community includes 25,000+ data professionals and is growing month-over-month. People genuinely love this community. It’s filled with smart, kind, and helpful people who share our commitment to elevating the analytics profession.
+As of October 2022, the dbt Slack community includes 35,000+ data professionals and is growing month-over-month. People genuinely love this community. It’s filled with smart, kind, and helpful people who share our commitment to elevating the analytics profession.

We are committed to maintaining the spirit of this community, and as such have written these rules to help new members understand how to best participate in our community.

@@ -18,13 +18,13 @@ Everyone interacting in dbt Slack, codebase, issue trackers, and mailing lists a
## Rule 2: Use the right channel

It’s important that we make it possible for members of the community to opt-in to various types of conversations. Our different Slack channels specifically exist for this purpose. Our members do a wonderful job at making sure messages are posted in the most relevant channel, and you’ll frequently see people (respectfully!) reminding each other about where to post messages. Here's a guide to our channels:
-- If you're new to dbt and unsure where something belongs, feel free to post in **#dbt-beginners** - we'll be able to direct you to the right place
+- If you're new to dbt and unsure where something belongs, feel free to post in **#advice-dbt-for-beginners** - we'll be able to direct you to the right place
- **For job postings, use #jobs**.
If you post a job description outside of #jobs, we will delete it and send you a link to this rule.
- For database-specific questions, use **#db-snowflake**, **#db-bigquery**, **#db-redshift**, or similar.
- For questions about data modeling or for SQL help, use **#modeling**
- For conversations unrelated to dbt or analytics, consider if dbt Slack is an appropriate medium for the conversation. If so, use **#memes-and-off-topic-chatter**.

-If you're hitting an error, consider posting to [Stack Overflow](https://stackoverflow.com/questions/ask?tags=dbt) — we're moving more troubleshooting questions to Stack Overflow. A community member might summon Slackbot to let you know that Stack Overflow is a more appropriate medium by posting the words `stack overflow bot` as a reply to your message.
+If you're hitting an error, you should post your question in [the Community Forum](https://discourse.getdbt.com) instead.

## Rule 3: Put effort into your question
dbt Slack is a community of volunteers. These are kind, knowledgeable, helpful people who share their time and expertise for free.
diff --git a/website/docs/community/resources/speaking-at-a-meetup.md b/website/docs/community/resources/speaking-at-a-meetup.md
new file mode 100644
index 00000000000..cc1a424139e
--- /dev/null
+++ b/website/docs/community/resources/speaking-at-a-meetup.md
@@ -0,0 +1,249 @@
+---
+title: "How to deliver a fantastic meetup talk"
+id: "speaking-at-a-meetup"
+---
+
+# How to deliver a fantastic meetup talk
+
+**Speaking at a dbt meetup? Here are all the details you’ll need to know. If you’re speaking at another event, check out our additional tips at the end of the article.**
+
+## Understanding dbt meetups
+
+dbt meetups are an opportunity for the dbt community to learn from each other. We’re typically on the lookout for talks that last ~15 minutes, and we reserve an additional 5-10 minutes for Q&A after your talk.
+
+We’re not *just* looking for talks that feature dbt — if your topic feels relevant to analytics engineers, we’d love to chat.
+
+In general, you can assume that around three quarters of the audience are dbt users. When shaping your talk, consider whether there’s something in there that might be new to an experienced dbt user and, on the other end of the scale, something that feels relevant to a data practitioner who isn’t yet a dbt user.
+
+If you feel that your talk idea requires in-depth knowledge of dbt, consider speaking on Office Hours instead. Similarly, if you’re interested in giving a more introductory talk about dbt, consider reaching out to a local data meetup to see if it’s the right fit.
+
+For topic inspiration, you can find videos of past dbt meetup presentations [here](https://www.youtube.com/playlist?list=PL0QYlrC86xQn-jxWmEqtQRbZoyjq_ffq5).
+
+If you want to present at a dbt meetup, let us know [here](https://docs.google.com/forms/d/e/1FAIpQLScU4c0UvXLsasc7uwFBrzt6YzuGiMzEH_EyFfXGnIYDmTBDfQ/viewform). If we haven’t met you before, we might book a call to say hi and help shape your topic! We’ll also book a meeting before the event for a dry run of the presentation to give any additional feedback.
+
+
+## Recognize when you’re ready to give a talk
+Below, we’ve listed four signs that you’re ready to give a talk (originally based on [this article](https://thinkgrowth.org/how-to-write-about-your-work-652441747f41) from our Head of Marketing, Janessa — read that too!).
We’ve also included examples for each category — where possible these are dbt meetup talks, but some of them are also links to blog posts from members in our community.
+
+### You recently finished a high-impact project
+These are a great option for first-time speakers as they mix together both big-picture thinking and tactics. For example:
+
+- "Improving data reliability" — Andrea Kopitz ([video](https://www.youtube.com/watch?v=M_cNspn2XsE), [slides](https://docs.google.com/presentation/d/1gHChax5aM3tqKkhepX7Mghmg0DTDbY5yoDBCfUR23lg/))
+- "Predicting customer conversions using dbt + machine learning" — Kenny Ning ([video](https://www.youtube.com/watch?v=BF7HH8JDUS0), [slides](https://docs.google.com/presentation/d/1iqVjzxxRggMnRoI40ku88miDKw795djpKV_v4bbLpPE/))
+- "Migrating 387 models from Redshift to Snowflake" — Sam Swift and Travis Dunlop ([video](https://www.youtube.com/watch?v=VhH614WVufM), [slides](https://docs.google.com/presentation/d/1wE8NSkFPLFKGQ8fvFUUKoZFVoUhws_FhFip-9mDhoPU/))
+
+### You hit an inflection point in your career
+Have you recently changed something about your career that you think others can learn from? Started a new job, grown in your role? These topics might not mention dbt at all, but will be relevant to many people in the audience. For example:
+
+- “Getting hired as an analytics engineer: a candidate’s perspective” — Danielle Leong ([video](https://www.youtube.com/watch?v=6VCr30ZFxZ0))
+- “One analyst's guide for going from good to great” — Jason Ganz ([blog post](https://blog.getdbt.com/one-analysts-guide-for-going-from-good-to-great/))
+
+Other ideas:
+- You moved from a team of many to a team of one (or vice versa), and want to share what each can learn from the other
+- You started to manage others and learned some things along the way
+
+### You’re digging deep into a topic
+If you’ve spent many hours going deep on a topic, it could be a good idea to share what you’ve learned. For example:
+
+- “The farm-to-table testing framework” — Andrea Fabry ([blog post](https://blog.getdbt.com/data-testing-framework/))
+- “How to create a career ladder” — Caitlin Moorman ([blog post](https://locallyoptimistic.com/post/career-ladders-part-2/))
+
+### You have a strong opinion about something
+Is there a “best practice” that you think is outdated? Want to convince others to see your point of view? In the data space, we’ve seen this in topics like:
+
+- “Engineers shouldn’t write ETL” — Jeff Magnusson ([blog post](https://multithreaded.stitchfix.com/blog/2016/03/16/engineers-shouldnt-write-etl/))
+- “You probably don’t need a data dictionary” — Michael Kaminsky & Alexander Jia ([blog post](https://locallyoptimistic.com/post/data_dictionaries/))
+- “Write better SQL: In defense of `group by 1`” — Claire Carroll ([blog post](https://blog.getdbt.com/write-better-sql-a-defense-of-group-by-1/))
+
+## Checkpoint: Is someone else well-placed to give this talk?
+Once you have a topic idea, stop for a moment and consider whether someone else on your team might also be a great fit for delivering this talk. Individuals from underrepresented groups are far less likely to self-nominate to give a talk — sometimes a shoulder tap is the nudge that’s needed.
+
+## Shaping your talk
+Now, it’s time to write! Rather than starting with a slide deck, open up a blank document (or use [our template](https://docs.google.com/document/d/16aog0VitdLSScgxSNKe36q1C92QmG2vjXmtXYcPAhfw/edit#)), and start writing some notes.
This helps you clarify your thinking, and is a great way to get feedback early, rather than investing the time into creating slides that might later be reworked.
+
+Don’t get too hung up on a title at this stage — we’re happy to work with you on that later in the process.
+
+### The basic structure
+Below, we’ve outlined a common structure used for meetup talks — if this is your first talk, this is a great way to get started (in fact, even experienced speakers often use a structure like this). Use this as a starting point, rather than an exact formula!
+
+###### 1. What is the business problem?
+
+Relating to a business problem helps audience members understand why you undertook a project. For example:
+- The finance team didn’t trust our numbers
+- We were never sure what led to an increase in customer conversion
+- The data team couldn’t find a balance between ad hoc requests and roadmap work
+- Our tracking across mobile and web was completely inconsistent
+
+###### 2. How did this manifest?
+Include evidence that this is a genuine problem — this helps create buy-in from the audience. Slack screenshots, quotes, charts, etc. are all good here!
+
+###### 3. What tactics were used to solve the problem?
+Three feels like a good number here. Make sure to emphasize people and process solutions as well as technology solutions.
+
+###### 4. What was the impact on the business problem?
+Since you set out a problem to be solved, it’s worth revisiting it. It’s okay if you found that your project didn’t go as planned — there’s a valuable lesson in there. Again, including evidence of improvement feels valuable.
+
+###### 5. What other things were learned, and/or what next steps are you taking?
+Summarize high-level lessons that others can take away, and potentially talk about what you’d do differently, or what you plan on doing next.
+
+### Why does this structure work?
+
+The above structure might seem formulaic, but we’ve seen it work a number of times. In our opinion, this structure works because:
+
+- **Your presentation has the structure of a story** — problem, journey, solution. Human beings love stories, and so the flow feels natural and easy for your audience to follow.
+- **It increases the target audience**. Sharing a few different tactics means that it’s more likely there will be something in your talk for different audience members. Compare that to narrowly scoping a talk on “[Writing packages when a source table may or may not exist](https://discourse.getdbt.com/t/writing-packages-when-a-source-table-may-or-may-not-exist/1487)” — it’s not going to feel relevant to most people in the room.
+- **It covers both theory and application.** Too much theory and you’re giving a TED Talk, too much application and you’re just giving a product demo. The best Meetup talks help people understand how you thought through a problem and why you made certain decisions so they can apply your knowledge within their unique context.
+
+### Examples that follow this structure
+
+Here are a few of our favorite talks mapped to the structure — trust us, it works!
+
+#### Improving data reliability — Andrea Kopitz, Envoy
+
+*[Video](https://www.youtube.com/watch?v=M_cNspn2XsE), [slides](https://docs.google.com/presentation/d/1gHChax5aM3tqKkhepX7Mghmg0DTDbY5yoDBCfUR23lg/).*
+
+###### 1. What is the business problem?
+Envoy’s financial data appeared inconsistent.
+
+###### 2. How did this manifest?
+Respondents to the team’s data survey said they no longer trusted the data.
+
+###### 3. What tactics were used to solve the problem?
What tactics were used to solve the problem? +1. Determine responsibility +2. Build more specific dbt tests +3. Track progress + +###### 4. What was the impact on the business problem? +In their next data survey, the satisfaction rating increased, and there was no mention of financial data accuracy. + +###### 5. What other things were learned, and/or what next steps are you taking? +Lesson: Send out a data survey to your company to inform your roadmap. + +#### Predicting customer conversions with dbt + machine learning — Kenny Ning, Better.com + + +*[Video](https://www.youtube.com/watch?v=BF7HH8JDUS0), [slides](https://docs.google.com/presentation/d/1iqVjzxxRggMnRoI40ku88miDKw795djpKV_v4bbLpPE/).* + +###### 1. What is the business problem? +No one knew why conversion rates for better.com customers would improve or worsen, making it difficult to know the value of different parts of the business. + +###### 2. How did this manifest? +Different parts of the business took responsibility when it improved; no one took responsibility when it worsened. + +###### 3. What tactics were used to solve the problem? +1. Use a different approach to conversion rates — Kaplan-Meier conversion rates +2. Sketch out an ideal ML solution and see if it theoretically solves the problem +3. Build it! (ft. demonstration of solution) + +###### 4. What was the impact on the business problem? +In the end — not as valuable as originally hoped (and that’s ok!). Editor’s note: [this article](https://better.engineering/2020-06-24-wizard-part-ii/) was a great follow-up on the initial project. + +###### 5. What other things were learned, and/or what next steps are you taking? +- Focus on end-to-end solutions +- Materialize your clean dataset to improve collaboration +- Sell to the business + +#### Migrating 387 models from Redshift to Snowflake — Bowery Farming Data Team + +*[Video](https://www.youtube.com/watch?v=VhH614WVufM), [slides](https://docs.google.com/presentation/d/1wE8NSkFPLFKGQ8fvFUUKoZFVoUhws_FhFip-9mDhoPU/).* + +###### 1. What is the business problem? +A new Bowery Farming site had increased the amount of data the team was dealing with, which put a strain on their data stack. + +###### 2. How did this manifest? +Charts showed increased dbt run times and increased Redshift costs. + +###### 3. What tactics were used to solve the problem? +1. Push Redshift to its limit: Leverage Athena, Redshift configurations, separate clusters, Python pre-processing +2. Trial Snowflake for cost and performance +3. Commit to a migration with strong project management + +###### 4. What was the impact on the business problem? +Yet to be determined (at the time, they had just finished the project). But the team showed evidence that the project had been successfully completed! + +###### 5. What other things were learned, and/or what next steps are you taking? +Other things learned: +- Differences between Redshift and Snowflake SQL syntax +- Teamwork and coordination are key to completing a migration + +## Turn it into a presentation +Now, it's time to take your idea and turn it into a presentation. + +### Structuring your slides +As well as the slides that directly support your content, consider including: +- At the start: + - An intro slide for yourself (and teammates) + - An intro slide for your company — you might also include some impressive numbers about your business; after all, your audience is full of people who love numbers! 
+ - Potentially include your tech stack for context — there’s no need to spend too much time on this; most audience members will be familiar with the tools. +- Before diving into the specific tactics used: + - Use a slide to list the three tactics at a high level — this signposting helps set expectations for audience members. +- At the end: + - A closing slide to prompt questions, and list your contact details. + - If your company is hiring, mention that too! + +If available, use your corporate-branded slide deck. We also have dbt-branded slides if you want to use those. + +### Making your presentation shine + +When turning your story into a presentation, also consider doing the following: + +#### Use full sentences in your slide headings + +When presenting (especially virtually), it’s hard to hold everyone’s focus. That’s ok! By including full sentences as your heading, people can “hook” back into the presentation. For example, rather than having a slide on "Slide headings", use a title like “Use full sentences in your slide headings” (woah — meta!) + +#### Make your slides accessible + +This is a [great guide](https://www.smashingmagazine.com/2018/11/inclusive-design-accessible-presentations/) on making your slides accessible — read it! + +#### Use evidence in your slides + +Evidence is a key part of getting buy-in that the story you’re telling is valuable. Consider including: + +- Screenshots of Slack conversations +- Quotes, survey results, charts +- If talking about a complex transformation, include small samples of data to demonstrate the concept. You may need to generate some fake data to simplify the problem (example) +- If one of your tactics is heavily code-based, consider sharing that code in a separate piece so that interested folks can refer back to it later. (Discourse is great for this) + +#### (Virtual events) Create moments for interactivity + +For virtual events: is there a poll you can launch, or a question you can throw out to the chat? This can help create a sense of community at the event. + +## Pair it with a blog post + +The hardest part of nailing a great talk is the content, so if you’ve made it this far, you’ve already done most of the work. Turning your content into a blog post is a great way to solidify your thinking, and get some extra exposure. If you’d like to be featured on the [dbt Blog](https://blog.getdbt.com/), reach out to us (@Claire and @Janessa) on Slack. + +We’ll also be adding more resources on how to write about your work soon! + +## Speaking at a non-dbt event + +Above, we’ve given specific advice for speaking at a dbt meetup. If you’re a dbt community member who wants to speak at a non-dbt meetup or conference, there are a few extra ways you can adjust your process. + +### Questions to ask the event organizer + +#### What is the technical baseline for the audience? + +Do they know about dbt? If not, are they familiar with SQL? You’ll likely have a range of people in the audience so there won’t be one exact answer, but gathering information about the median knowledge is useful. As a guideline, aim to teach something new to at least half of the audience. + +#### What kind of talks have been the most successful? + +Is the event oriented around technical talks or strategic talks? Is there an expectation of demo-ing code? Do they have past examples of talks that were well-received, or any tips? + +#### What are the event logistics? + +How long is your talk supposed to go for? Is there an opportunity to do Q&A? 
+ +If the event is virtual, what is the software setup like? How will questions be moderated? + +If the event is in-person, will you be able to use your own computer, or will you use someone else’s? What sort of screen is there? How do you connect to it? And do you have the right dongle for your MacBook Pro? + +#### Is there an opportunity for topic feedback? + +Is the organizer interested in working with you to make your topic great? If not, can they point you to someone in their community who might be interested in helping out? + +#### Are there any additional accessibility considerations you should be aware of? + +Do any audience members use a communication device? Can you share your slides ahead of time to make them easier for audience members to access? Will the event be recorded for those who can’t attend in person? + +### Responding to a conference Call for Speakers + +If you’re submitting a response to a Call for Speakers and talking about dbt, we’re happy to work with you on this. Reach out to us (@Claire and @Janessa) in Slack to let us know! diff --git a/website/docs/community/resources/vendor-guidelines.md b/website/docs/community/resources/vendor-guidelines.md new file mode 100644 index 00000000000..1b6bb6c9511 --- /dev/null +++ b/website/docs/community/resources/vendor-guidelines.md @@ -0,0 +1,66 @@ +--- +title: "Vendor guidelines" +id: "vendor-guidelines" +--- + +# Engaging in the dbt Community as a Vendor + +A key aspect that makes dbt stand out from other tools is the dbt Community. +This community was built to further our mission of empowering analysts. +This includes advancing the field of analytics engineering. +We are creating spaces where folks can learn from each other, share best practices, +discover what it means to use software engineering workflows, and so on. + +The dbt community extends far beyond what happens in dbt Slack. There are regular meetups, +blog posts, and even a conference! Our North Star is to extend the knowledge loop; +we are a community, not an audience. + +Our community members expect a thoughtful space full of kind, curious, and bright individuals. +They contribute to the knowledge loop with their own expertise and benefit from the relevant knowledge brought to the table by other community experts (including vendors). +Along those lines, **we value diversity and inclusion**. +We seek to amplify underrepresented communities and have no tolerance for anyone who is disrespectful in this space. + +As a vendor/dbt partner, you are also a member of this community, and we deeply +encourage you to share your expertise in tooling, analytics, and more. +Our community members are truly open to discovering and discussing innovative solutions and tools. +We have seen folks grow fantastic user relationships for their products when they come in with a mindset of sharing rather than pitching. + +To guide you on your community journey, we have created this document for you to read and share with your coworkers. +By following these guidelines, you will help us maintain this community as well as gain +full access to all the benefits that it can provide. + + +## Dos & Don'ts for dbt Slack + +### Dos +- **Read the Rules of The Road.** These rules describe the best ways to participate in our community. +- **Fill out your profile!** We want to get to know you, so upload a picture of yourself and add your company to your name (e.g. "Alice (DataCo)"). 
Be sure to include your company in your profile so folks know that you work for a vendor. +- **Introduce Yourself in #introductions.** Tell us about yourself! +- **Be helpful.** We encourage folks to answer questions and offer their product expertise to conversations already in motion. You can even invite folks to chat in DMs if anyone wants more info about your product. But be sure you identify yourself and your business interests in the thread. +- **Be yourself when posting; speak in your own voice.** +- **Participate in all the conversations that interest you.** Make a meme if you’re so inclined. Get in a (friendly) debate. You are not limited to only your company's products and services. +- **Post with intention.** If you have a link or product update that is appropriate to share, give context. + +### Don'ts +- **Do not do 1:1 outbound.** Only initiate DMs if you’ve received active confirmation in a public channel that a DM would be welcome. +- **Do not be anonymous.** Folks who identify themselves clearly are able to build empathy and form genuine relationships much more easily. This is what we want for the community. +- **Do not spam channels with marketing material.** +- **Do not post without context.** Posts that include context outside of just the pitch are the ones that add value to our community. + + +## Summary + +This community is centered around feeding into the knowledge loop. It’s a place intended for building genuine, helpful connections. We have found that most vendors find success in our space by leading with this intention. + +Here are some ways you can contribute to the community: + +- contribute to the dbt Core repository +- write dbt packages +- write other public content (blog posts, case studies, etc.) +- respond to questions on Slack / Discourse +- host events +- promote / respond to content written by community members +- partner up with community members on blog posts, code, etc. + +For more information on the thought behind our community, especially if you are interested in creating your own, feel free to +reach out to our community managers. diff --git a/website/docs/docs/about/viewpoint.md b/website/docs/community/resources/viewpoint.md similarity index 97% rename from website/docs/docs/about/viewpoint.md rename to website/docs/community/resources/viewpoint.md index 9601a3fd46f..e159c6178a3 100644 --- a/website/docs/docs/about/viewpoint.md +++ b/website/docs/community/resources/viewpoint.md @@ -1,9 +1,9 @@ --- -title: "Viewpoint" +title: "The dbt Viewpoint" id: "viewpoint" --- -:::info Building a Mature Analytics Workflow: The dbt Viewpoint +:::info Building a Mature Analytics Workflow: The dbt Viewpoint! In 2015-2016, a team of folks at RJMetrics had the opportunity to observe, and participate in, a significant evolution of the analytics ecosystem. The seeds of dbt were conceived in this environment, and the viewpoint below was written to reflect what we had learned and how we believed the world should be different. **dbt is our attempt to address the workflow challenges we observed, and as such, this viewpoint is the most foundational statement of the dbt project's goals.** @@ -53,7 +53,7 @@ Analytics requires multiple environments. Analysts need the freedom to work with Analytics teams should stand behind the accuracy of all analysis that has been promoted to production. Errors should be treated with the same level of urgency as bugs in a production product. Any code being retired from production should go through a deprecation process. 
### Design for maintainability -Most of the cost involved in software development is in the maintenance phase. Because of this, software engineers write code with an eye towards maintainability. Analytic code, however, is often fragile. Changes in underlying data break most analytic code in ways that are hard to predict and to fix. +Most of the cost involved in software development is in the maintenance phase. Because of this, software engineers write code with an eye towards maintainability. Analytic code, however, is often fragile. Changes in underlying data break most analytic code in ways that are hard to predict and to fix. Analytic code should be written with an eye towards maintainability. Future changes to the schema and data should be anticipated and code should be written to minimize the corresponding impact. diff --git a/website/docs/dbt-cli/cli-overview.md b/website/docs/dbt-cli/cli-overview.md index 2a642c86d55..7a911a7eb95 100644 --- a/website/docs/dbt-cli/cli-overview.md +++ b/website/docs/dbt-cli/cli-overview.md @@ -5,7 +5,7 @@ description: "Run your dbt project from the command line." dbt Core ships with a command-line interface (CLI) for running your dbt project. The dbt CLI is free to use and available as an [open source project](https://github.com/dbt-labs/dbt-core). -When using the command line, you can run commands and do other work from the current or _working directory_ on your computer. Before running the dbt project from the command line, make sure the working directory is your dbt project directory. For more details, see "[Creating a dbt project](/docs/building-a-dbt-project/projects)." +When using the command line, you can run commands and do other work from the current or _working directory_ on your computer. Before running the dbt project from the command line, make sure the working directory is your dbt project directory. For more details, see "[Creating a dbt project](/docs/build/projects)." diff --git a/website/docs/dbt-cli/install/docker.md b/website/docs/dbt-cli/install/docker.md deleted file mode 100644 index e71b728081b..00000000000 --- a/website/docs/dbt-cli/install/docker.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -title: "Use Docker to install dbt" -description: "You can use Docker to install dbt and adapter plugins from the command line." ---- - -By v1.0.0, dbt Core and all adapter plugins maintained by dbt Labs will be available as official Docker images, -and available from a public registry. -We recommend you use Docker to install in production because it includes dbt Core and all of its dependencies. You might also use a Docker to install and develop locally if you don't have your python environment set up. - -More information coming soon! diff --git a/website/docs/dbt-cli/install/overview.md b/website/docs/dbt-cli/install/overview.md deleted file mode 100644 index a5848dc4efe..00000000000 --- a/website/docs/dbt-cli/install/overview.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -id: "overview" -title: "How to install dbt" -description: "You can install dbt Core using a few different tested methods." 
---- - -You can install dbt Core on the command line by using one of these recommended methods: - -- [Use Homebrew to install dbt](install/homebrew) (recommended for MacOS + most popular plugins) -- [Use pip to install dbt](install/pip) -- [Use a Docker image to install dbt](install/docker) -- [Install dbt from source](install/from-source) - - -## About dbt adapters - -dbt works with a number of different databases, query engines, and other SQL-speaking technologies. It does this by using a dedicated _adapter_ for each. When you install dbt, you'll also want to install the specific adapter for your database. For more details, see the [list of available adapters](available-adapters). diff --git a/website/docs/docs/about/license.md b/website/docs/docs/about/license.md deleted file mode 100644 index ab47907a12a..00000000000 --- a/website/docs/docs/about/license.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: "License" -id: "license" ---- - -[dbt Core is licensed under the Apache 2.0 License](https://github.com/dbt-labs/dbt). - -dbt Cloud is proprietary. diff --git a/website/docs/docs/about/overview.md b/website/docs/docs/about/overview.md index 6685fd2cec4..f4ab1e6396b 100644 --- a/website/docs/docs/about/overview.md +++ b/website/docs/docs/about/overview.md @@ -5,7 +5,7 @@ id: "overview" # What is dbt? -dbt (data build tool) is a productivity tool that helps analysts get more done and produce higher quality results. +dbt is a productivity tool that helps analysts get more done and produce higher quality results. Analysts commonly spend 50-80% of their time modeling raw data—cleaning, reshaping, and applying fundamental business logic to it. dbt empowers analysts to do this work better and faster. @@ -48,7 +48,7 @@ While there are a large number of great languages for manipulating data, we’ve # What databases does dbt currently support? -View the full list of supported databases, warehouses, and query engines [here](available-adapters). +See [Supported Data Platforms](supported-data-platforms) to view the full list of supported databases, warehouses, and query engines. # How do I get started? diff --git a/website/docs/docs/available-adapters.md b/website/docs/docs/available-adapters.md deleted file mode 100644 index 2f12e2dbae5..00000000000 --- a/website/docs/docs/available-adapters.md +++ /dev/null @@ -1,72 +0,0 @@ ---- -title: "Available adapters" -id: "available-adapters" ---- - -dbt connects to and runs SQL against your database, warehouse, platform, or query engine. It works by using a dedicated **adapter** for each technology. All the adapters listed below are open source and free to use, just like dbt. - -If you have a new adapter, please add it to this list via a pull request! See [Documenting your adapter](/website/docs/docs/contributing/documenting-a-new-adapter.md) for more information. - -### Installation - -Most adapters can be installed from PyPi using `pip`. The installation will include `dbt-core` and any other required dependencies, which may include other adapter plugins. Read more about [installing dbt](dbt-cli/install/overview). - -Some vendor or community adapters may not exist in PyPi. 
However, you can still install an adapter hosted on GitHub with `pip install`, by replacing MAINTAINER_NAME with the person or company maintaining the adapter on GitHub and ADAPTER_NAME with the git repository's name (these can be taken directly from the adapter's url): - -```shell -pip install git+https://github.com/MAINTAINER_NAME/ADAPTER_NAME.git -``` - -### dbt Labs Supported - -In addition to maintaining `dbt-core`, [dbt Labs](https://github.com/dbt-labs) maintains a set of adapters for some of the most common databases, warehouses, and platforms. (✅ indicates "full support.") - -| Adapter for | Documentation | Core features | dbt Cloud | Install from PyPi | -| ------------ | ------------- | ------------- | --------- | ----------------- | -| Postgres | [Profile Setup](postgres-profile) | ✅ | ✅ | `pip install dbt-postgres` | -| Redshift | [Profile Setup](redshift-profile), [Configuration](redshift-configs) | ✅ | ✅ | `pip install dbt-redshift` | -| BigQuery | [Profile Setup](bigquery-profile), [Configuration](bigquery-configs) | ✅ | ✅ | `pip install dbt-bigquery` | -| Snowflake | [Profile Setup](snowflake-profile), [Configuration](snowflake-configs) | ✅ | ✅ | `pip install dbt-snowflake` | -| Apache Spark | [Profile Setup](spark-profile), [Configuration](spark-configs) | ✅ | ✅ | `pip install dbt-spark[PyHive]` | - -### Vendor Supported - -These adapter plugins are built and maintained by the same people who build and maintain the complementary data technology. - -| Adapter for | Documentation | Install from PyPi | -| ------------ | ------------- | ----------------- | -| Databricks ([dbt-databricks](https://github.com/databricks/dbt-databricks)) | [Profile Setup](databricks-profile), [Configuration](spark-configs#databricks-configurations) | `pip install dbt-databricks` | -| Firebolt ([dbt-firebolt](https://github.com/firebolt-db/dbt-firebolt)) | [Profile Setup](firebolt-profile), [Configuration](firebolt-configs) | `pip install dbt-firebolt` | -| Materialize ([dbt-materialize](https://github.com/MaterializeInc/materialize/blob/main/misc/dbt-materialize)) | [Profile Setup](materialize-profile), [Configuration](materialize-configs) | `pip install dbt-materialize` | -| Oracle ([dbt-oracle](https://github.com/oracle/dbt-oracle)) | [Profile Setup](oracle-profile) | `pip install dbt-oracle` | -| Rockset ([dbt-rockset](https://github.com/rockset/dbt-rockset)) | [Profile Setup](rockset-profile) | `pip install dbt-rockset` | -| SingleStore ([dbt-singlestore](https://github.com/memsql/dbt-singlestore)) | [Profile Setup](singlestore-profile) | `pip install dbt-singlestore` | -| Starburst & Trino ([dbt-trino](https://github.com/starburstdata/dbt-trino)) | [Profile Setup](trino-profile) | `pip install dbt-trino` | -| Teradata ([dbt-teradata](https://github.com/teradata/dbt-teradata)) | [Profile Setup](teradata-profile), [Configuration](teradata-configs) | `pip install dbt-teradata` | -| TiDB ([dbt-tidb](https://github.com/pingcap/dbt-tidb)) | [Profile Setup](tidb-profile) | `pip install dbt-tidb` | - - -### Community Supported - -These adapter plugins are contributed and maintained by members of the community 🌱 - -| Adapter for | Documentation | Notes | Install with pip | -|------------------------|---------------------------------------|---------------------------|------------------------------| -| SQL Server & Azure SQL | [Profile Setup](mssql-profile) | SQL Server 2016 and later | `pip install dbt-sqlserver` | -| Azure Synapse | [Profile Setup](azuresynapse-profile) | Azure Synapse 10+ | `pip 
install dbt-synapse` | -| Exasol Analytics | [Profile Setup](exasol-profile) | Exasol 6.x and later | `pip install dbt-exasol` | -| Dremio | [Profile Setup](dremio-profile) | Dremio 4.7+ | `pip install dbt-dremio` | -| ClickHouse | [Profile Setup](clickhouse-profile) | ClickHouse 20.11+ | `pip install dbt-clickhouse` | -| Athena | [Profile Setup](athena-profile) | Athena engine version 2 | `pip install git+https://github.com/Tomme/dbt-athena.git` | -| Vertica | [Profile Setup](vertica-profile) | Vertica 10.0+ | `pip install dbt-vertica` | -| AWS Glue | [Profile Setup](glue-profile), [Configuration](glue-configs) | Glue 2.0+ | `pip install dbt-glue` | - -Community-supported plugins are works in progress, and anyone is welcome to contribute by testing and writing code. If you're interested in contributing: -- Join both the dedicated #adapter-ecosystem channel in [dbt Slack](https://community.getdbt.com/) and the channel for your adapter's data store (e.g. #db-sqlserver, #db-athena) -- Check out the open issues in the plugin's source repository - -Note that, while no community plugins are currently supported in dbt Cloud, we expect this to change in the near future. - -## Creating a new adapter - -dbt can be extended to any SQL-speaking database, warehouse, data lake, query engine, or analytical platform by means of an _adapter plugin_. These plugins can be built as separate Python modules, and dbt will discover them if they are installed on your system. If you see something missing from the lists above, and you're interested in developing an integration, read more about [building a new adapter](building-a-new-adapter). diff --git a/website/docs/docs/building-a-dbt-project/analyses.md b/website/docs/docs/build/analyses.md similarity index 100% rename from website/docs/docs/building-a-dbt-project/analyses.md rename to website/docs/docs/build/analyses.md diff --git a/website/docs/docs/building-a-dbt-project/building-models/using-custom-aliases.md b/website/docs/docs/build/custom-aliases.md similarity index 89% rename from website/docs/docs/building-a-dbt-project/building-models/using-custom-aliases.md rename to website/docs/docs/build/custom-aliases.md index c382c2c6662..53671291ef8 100644 --- a/website/docs/docs/building-a-dbt-project/building-models/using-custom-aliases.md +++ b/website/docs/docs/build/custom-aliases.md @@ -1,6 +1,6 @@ --- -title: "Using aliases" -id: "using-custom-aliases" +title: "Custom aliases" +id: "custom-aliases" --- ## Overview @@ -8,7 +8,7 @@ id: "custom-aliases" When dbt runs a model, it will generally create a relation (either a `table` or a `view`) in the database. By default, dbt uses the filename of the model as the identifier for this relation in the database. This identifier can optionally be overridden using the `alias` model configuration. ### Why alias model names? -The names of schemas and tables are effectively the "user interface" of your data warehouse. Well-named schemas and tables can help provide clarity and direction for consumers of this data. In combination with [custom schemas](using-custom-schemas), model aliasing is a powerful mechanism for designing your warehouse. +The names of schemas and tables are effectively the "user interface" of your data warehouse. Well-named schemas and tables can help provide clarity and direction for consumers of this data. In combination with [custom schemas](/docs/build/custom-schemas), model aliasing is a powerful mechanism for designing your warehouse. 
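+ +For illustration, a minimal sketch of the `alias` config in a model file might look like this (the file name and the `stg_sessions` ref below are hypothetical): + +```sql +-- models/sessions__daily.sql (hypothetical file name) +-- Without an alias, dbt would name the relation "sessions__daily"; +-- with this config, dbt builds it as "sessions" instead. +{{ config(alias='sessions') }} + +select * from {{ ref('stg_sessions') }} +```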
### Usage The `alias` config can be used to change the name of a model's identifier in the database. The following shows examples of database identifiers for models both with, and without, a supplied `alias`. @@ -50,7 +50,7 @@ select * from {{ ref('snowplow_sessions') }} ### generate_alias_name -The alias generated for a model is controlled by a macro called `generate_alias_name`. This macro can be overridden in a dbt project to change how dbt aliases models. This macro works similarly to the [generate_schema_name](using-custom-schemas#advanced-custom-schema-configuration) macro. +The alias generated for a model is controlled by a macro called `generate_alias_name`. This macro can be overridden in a dbt project to change how dbt aliases models. This macro works similarly to the [generate_schema_name](/docs/build/custom-schemas#advanced-custom-schema-configuration) macro. To override dbt's alias name generation, create a macro named `generate_alias_name` in your own dbt project. The `generate_alias_name` macro accepts two arguments: @@ -113,4 +113,4 @@ Compilation Error - model.my_project.sessions (models/sessions.sql) ``` -If these models should indeed have the same database identifier, you can work around this error by configuring a [custom schema](using-custom-schemas) for one of the models. +If these models should indeed have the same database identifier, you can work around this error by configuring a [custom schema](/docs/build/custom-schemas) for one of the models. diff --git a/website/docs/docs/building-a-dbt-project/building-models/using-custom-databases.md b/website/docs/docs/build/custom-databases.md similarity index 94% rename from website/docs/docs/building-a-dbt-project/building-models/using-custom-databases.md rename to website/docs/docs/build/custom-databases.md index c466c7d44b3..75f9ee45d46 100644 --- a/website/docs/docs/building-a-dbt-project/building-models/using-custom-databases.md +++ b/website/docs/docs/build/custom-databases.md @@ -1,5 +1,6 @@ --- -title: "Using custom databases" +title: "Custom databases" +id: "custom-databases" --- @@ -55,7 +56,7 @@ select * from ... New in v0.16.0 -The database name generated for a model is controlled by a macro called `generate_database_name`. This macro can be overridden in a dbt project to change how dbt generates model database names. This macro works similarly to the [generate_schema_name](using-custom-schemas#advanced-custom-schema-configuration) macro. +The database name generated for a model is controlled by a macro called `generate_database_name`. This macro can be overridden in a dbt project to change how dbt generates model database names. This macro works similarly to the [generate_schema_name](/docs/build/custom-schemas#advanced-custom-schema-configuration) macro. To override dbt's database name generation, create a macro named `generate_database_name` in your own dbt project. 
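+ +As a reference point, an override can mirror dbt's default implementation, which falls back to the target database whenever no custom database is configured. A sketch, placed in a hypothetical `macros/generate_database_name.sql` file: + +```sql +{% macro generate_database_name(custom_database_name=none, node=none) -%} + {#- Use the database from the active target unless the model configures one -#} + {%- set default_database = target.database -%} + {%- if custom_database_name is none -%} + {{ default_database }} + {%- else -%} + {{ custom_database_name | trim }} + {%- endif -%} +{%- endmacro %} +```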
The `generate_database_name` macro accepts two arguments: diff --git a/website/docs/docs/building-a-dbt-project/building-models/using-custom-schemas.md b/website/docs/docs/build/custom-schemas.md similarity index 90% rename from website/docs/docs/building-a-dbt-project/building-models/using-custom-schemas.md rename to website/docs/docs/build/custom-schemas.md index db13662bad7..7d92d727733 100644 --- a/website/docs/docs/building-a-dbt-project/building-models/using-custom-schemas.md +++ b/website/docs/docs/build/custom-schemas.md @@ -1,6 +1,6 @@ --- -title: "Using custom schemas" -id: "using-custom-schemas" +title: "Custom schemas" +id: "custom-schemas" --- ## What is a custom schema? @@ -55,7 +55,7 @@ models: ### Why does dbt concatenate the custom schema to the target schema? When first using custom schemas, it's common to assume that a model will be built in a schema that matches the `schema` configuration exactly, for example, a model that has the configuration `schema: marketing` would be built in the `marketing` schema. However, dbt instead creates it in a schema like `<target_schema>_marketing` by default – there's good reason for this! -In a typical setup of dbt, each dbt user will use a separate target schema (see [Managing Environments](using-custom-schemas#managing-environments)). If dbt created models in a schema that matches a model's custom schema exactly, every dbt user would create models in the same schema. +In a typical setup of dbt, each dbt user will use a separate target schema (see [Managing Environments](/docs/build/custom-schemas#managing-environments)). If dbt created models in a schema that matches a model's custom schema exactly, every dbt user would create models in the same schema. Further, the schema that your development models are built in would be the same schema that your production models are built in! Instead, concatenating the custom schema to the target schema helps create distinct schema names, reducing naming conflicts. @@ -140,26 +140,25 @@ The following context methods _are_ available in the `generate_schema_name` macr | Other macros in your project | Macro | ✅ | | Other macros in your packages | Macro | ✅ | - #### Which vars are available in generate_schema_name? -Variable semantics have changed in dbt v0.17.0. See the [migration guide](upgrading-to-0-17-0) +Variable semantics have changed in dbt v0.17.0. See the [migration guide](/guides/migration/versions) for more information on these changes. Globally-scoped variables and variables defined on the command line with -[--vars](using-variables) are accessible in the `generate_schema_name` context. +[--vars](/docs/build/project-variables) are accessible in the `generate_schema_name` context. ## Managing environments In the `generate_schema_name` macro examples shown above, the `target.name` context variable is used to change the schema name that dbt generates for models. If the `generate_schema_name` macro in your project uses the `target.name` context variable, you must additionally ensure that your different dbt environments are configured appropriately. While you can use any naming scheme you'd like, we typically recommend: - **dev**: Your local development environment; configured in a `profiles.yml` file on your computer. - - **ci**: A [continuous integration](cloud-enabling-continuous-integration-with-github) environment running on Pull Requests in GitHub, GitLab, etc. - - **prod**: The production deployment of your dbt project, eg. 
in dbt Cloud, Airflow, or [similar](running-dbt-in-production) +- **ci**: A [continuous integration](/docs/collaborate/git/connect-github) environment running on Pull Requests in GitHub, GitLab, etc. +- **prod**: The production deployment of your dbt project, like in dbt Cloud, Airflow, or [similar](/docs/deploy/deployments). If your schema names are being generated incorrectly, double check your target name in the relevant environment. -For more information, consult the [Managing environments](managing-environments) guide. +For more information, consult the [Managing environments](/docs/collaborate/environments) guide. diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name.md b/website/docs/docs/build/custom-target-names.md similarity index 64% rename from website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name.md rename to website/docs/docs/build/custom-target-names.md index 5ca26a977d8..4e14f36b784 100644 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name.md +++ b/website/docs/docs/build/custom-target-names.md @@ -1,6 +1,6 @@ --- -title: "Setting a custom target name" -id: "cloud-setting-a-custom-target-name" +title: "Custom target names" +id: "custom-target-names" description: "You can define a custom target name for any dbt Cloud job to correspond to settings in your dbt project." --- @@ -19,11 +19,11 @@ where created_at > date_trunc('month', current_date) {% endif %} ``` -To set a custom target name for a job in dbt Cloud, configure the "Target Name" field for your job in the Job Settings page. +To set a custom target name for a job in dbt Cloud, configure the **Target Name** field for your job in the Job Settings page. - + ## dbt Cloud IDE -When developing in dbt Cloud, you can set a custom target name in your development credentials. Head to your account (via your profile image in the top right hand corner), select the project under "Credentials" and update the target name. +When developing in dbt Cloud, you can set a custom target name in your development credentials. Go to your account (from the gear menu in the top right hand corner), select the project under **Credentials**, and update the target name. diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables.md b/website/docs/docs/build/environment-variables.md similarity index 77% rename from website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables.md rename to website/docs/docs/build/environment-variables.md index a841ba76b0e..c6a2517c9a2 100644 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables.md +++ b/website/docs/docs/build/environment-variables.md @@ -1,11 +1,11 @@ --- title: Environment variables -id: "cloud-environment-variables" +id: "environment-variables" description: "Use environment variables to customize the behavior of your dbt project." --- Environment variables can be used to customize the behavior of a dbt project depending on where the project is running. See the docs on -[env_var](dbt-jinja-functions/env_var) for more information on how to call the jinja function `{{env_var('DBT_KEY','OPTIONAL_DEFAULT')}}` in your project code. +[env_var](/reference/dbt-jinja-functions/env_var) for more information on how to call the jinja function `{{env_var('DBT_KEY','OPTIONAL_DEFAULT')}}` in your project code. :::info Environment Variable Naming and Prefixing Environment variables in dbt Cloud must be prefixed with either `DBT_` or `DBT_ENV_SECRET_`. 
Environment variable keys are uppercased and case sensitive. When referencing `{{env_var('DBT_KEY')}}` in your project's code, the key must match exactly the variable defined in dbt Cloud's UI. @@ -19,16 +19,16 @@ Environment variable values can be set in multiple places within dbt Cloud. As a -There are four levels of environment variables: +There are four levels of environment variables: 1. the optional default argument supplied to the `env_var` Jinja function in code - 2. a project-wide default value, which can be overridden at - 3. the environment level, which can in turn be overridden again at + 2. a project-wide default value, which can be overridden at + 3. the environment level, which can in turn be overridden again at 4. the job level (job override) or in the IDE for an individual dev (personal override). **Setting environment variables at the project and environment level** -To set environment variables at the project and environment level, head over to the Environments section in your main navigation pane. You'll see a to list all your environments and a view to set and update environment variables. Make sure you're in the environment variable view. +To set environment variables at the project and environment level, click **Deploy** in the top left, then select **Environments**. Click **Environment Variables** to add and update your environment variables. @@ -36,8 +36,7 @@ To set environment variables at the project and environment level You'll notice there is a `Project Default` column. This is a great place to set a value that will persist across your whole project, independent of where the code is run. We recommend setting this value when you want to supply a catch-all default or add a project-wide token or secret. - -To the right of the `Project Default` column are all your environments. Values set at the environment level take priority over the project level default value. This is where you can tell dbt Cloud to intepret an environment value differently in your Staging vs. Production environment, as example. +To the right of the `Project Default` column are all your environments. Values set at the environment level take priority over the project level default value. This is where you can tell dbt Cloud to interpret an environment value differently in your Staging vs. Production environment, for example. @@ -45,9 +44,9 @@ To the right of the `Project Default` column are all your environments. Values s **Overriding environment variables at the job level** -You may have multiple jobs that run in the same environment, and you'd like the environment variable to be interpretted differently depending on the job. +You may have multiple jobs that run in the same environment, and you'd like the environment variable to be interpreted differently depending on the job. -When setting up or editing a job, you will see a button in the Environment section to `Set Environment Variables`. Clicking on that button will take you to a section where you can override environment variable values set at the environment or project level. +When setting up or editing a job, you will see a section where you can override environment variable values defined at the environment or project level. @@ -60,7 +59,7 @@ Every job runs in a specific deployment environment, and by default, a job will **Overriding environment variables at the personal level** -Developers may also want to set a personal value override for an environment variable while developing in the dbt IDE. 
By default, dbt Cloud will look to use environment variable values set in the project's development environment. To see and override these values, developers should head to their Profile and then select their project under Credentials and scroll to the Environment Variables section. +You can also set a personal value override for an environment variable when you develop in the dbt integrated development environment (IDE). By default, dbt Cloud uses environment variable values set in the project's development environment. To see and override these values, click the gear icon in the top right. Under "Your Profile," click **Credentials** and select your project. Click **Edit** and make any changes in "Environment Variables." @@ -84,12 +83,12 @@ There are some known issues with partial parsing of a project and changing envir ### Handling secrets -While all environment variables are encrypted at rest in dbt Cloud, dbt Cloud has additional capabilities for managing environment variables with secret or otherwise sensitive values. If you want a particular environment variable to be scrubbed from all logs and error messages, in addition to obfuscating the value in the UI, you can prefix the key with `DBT_ENV_SECRET_`. This functionality is supported from `dbt v1.0` and on. +While all environment variables are encrypted at rest in dbt Cloud, dbt Cloud has additional capabilities for managing environment variables with secret or otherwise sensitive values. If you want a particular environment variable to be scrubbed from all logs and error messages, in addition to obfuscating the value in the UI, you can prefix the key with `DBT_ENV_SECRET_`. This functionality is supported from `dbt v1.0` onward. -**Note**: An environment variable can be used to store a [git token for repo cloning](/cloud-environment-variables#clone-private-packages). We recommend you make the git token's permissions read only and consider using a machine account or service user's PAT with limited repo access in order to practice good security hygiene. +**Note**: An environment variable can be used to store a [git token for repo cloning](/docs/build/environment-variables#clone-private-packages). We recommend you make the git token's permissions read-only and consider using a machine account or service user's PAT with limited repo access to practice good security hygiene. ### Special environment variables @@ -102,12 +101,12 @@ dbt Cloud has a number of pre-defined variables built in. The following environm - `DBT_CLOUD_PROJECT_ID`: The ID of the dbt Cloud Project for this run - `DBT_CLOUD_JOB_ID`: The ID of the dbt Cloud Job for this run - `DBT_CLOUD_RUN_ID`: The ID of this particular run -- `DBT_CLOUD_RUN_REASON_CATEGORY`: The "category" of the trigger for this run (one of: `scheduled`, `github_pull_request`, `gitlab_merge_request`, `other`) +- `DBT_CLOUD_RUN_REASON_CATEGORY`: The "category" of the trigger for this run (one of: `scheduled`, `github_pull_request`, `gitlab_merge_request`, `azure_pull_request`, `other`) - `DBT_CLOUD_RUN_REASON`: The specific trigger for this run (eg. 
`Scheduled`, `Kicked off by `, or custom via `API`) **Git details** -_Note: These variables are currently only available for GitHub and GitLab +_Note: These variables are currently only available for GitHub, GitLab, and Azure DevOps PR builds triggered via a webhook_ - `DBT_CLOUD_PR_ID`: The Pull Request ID in the connected version control system @@ -119,12 +118,12 @@ PR builds triggered via a webhook_ Environment variables can be used in many ways, and they give you the power and flexibility to do what you want to do more easily in dbt Cloud. #### Clone private packages -Now that you can set secrets as environment variables, you can pass git tokens into your package HTTPS URLs to allow for on-the-fly cloning of private repositories. Read more about enabling [private package cloning](/building-a-dbt-project/package-management#private-packages). +Now that you can set secrets as environment variables, you can pass git tokens into your package HTTPS URLs to allow for on-the-fly cloning of private repositories. Read more about enabling [private package cloning](/docs/build/packages#private-packages). #### Dynamically set your warehouse in your Snowflake connection -Environment variables make it possible to dynamically change the Snowflake virtual warehouse size depending on the job. Instead of calling the warehouse name directly in your project connection, you can reference an environment variable which will get set to a specific virtual warehouse at runtime. +Environment variables make it possible to dynamically change the Snowflake virtual warehouse size depending on the job. Instead of calling the warehouse name directly in your project connection, you can reference an environment variable which will get set to a specific virtual warehouse at runtime. -As example, suppose you'd like to run a full-refresh job in an XL warehouse, but your incremental job only needs to run in a medium-sized warehouse. Both jobs are configured in the same dbt Cloud environment. In your connection configuration, you can use an environment variable to set the warehouse name to `{{env_var('DBT_WAREHOUSE')}}`. Then in the job settings, you can set a different value for the `DBT_WAREHOUSE` environment variable depending on the job's workload. +For example, suppose you'd like to run a full-refresh job in an XL warehouse, but your incremental job only needs to run in a medium-sized warehouse. Both jobs are configured in the same dbt Cloud environment. In your connection configuration, you can use an environment variable to set the warehouse name to `{{env_var('DBT_WAREHOUSE')}}`. Then in the job settings, you can set a different value for the `DBT_WAREHOUSE` environment variable depending on the job's workload. diff --git a/website/docs/docs/building-a-dbt-project/exposures.md b/website/docs/docs/build/exposures.md similarity index 85% rename from website/docs/docs/building-a-dbt-project/exposures.md rename to website/docs/docs/build/exposures.md index 44cba161b5b..2c06c5f4506 100644 --- a/website/docs/docs/building-a-dbt-project/exposures.md +++ b/website/docs/docs/build/exposures.md @@ -14,7 +14,7 @@ id: "exposures" * [Exposure properties](exposure-properties) * [`exposure:` selection method](node-selection/methods#the-exposure-method) -## Getting started +## Overview Exposures make it possible to define and describe a downstream use of your dbt project, such as in a dashboard, application, or data science pipeline. 
By defining exposures, you can then: - run, test, and list resources that feed into your exposure @@ -27,20 +27,22 @@ Exposures are defined in `.yml` files nested under an `exposures:` key. ```yaml +version: 2 + exposures: - + - name: weekly_jaffle_metrics type: dashboard maturity: high url: https://bi.tool/dashboards/1 description: > Did someone say "exponential growth"? - + depends_on: - ref('fct_orders') - ref('dim_customers') - source('gsheets', 'goals') - + owner: name: Claire from Data email: data@jaffleshop.com @@ -51,7 +53,7 @@ exposures: ### Available properties _Required:_ -- **name** (must be unique among exposures) +- **name** (must be unique among exposures and follow the [snake case](https://en.wikipedia.org/wiki/Snake_case) naming convention) - **type**: one of `dashboard`, `notebook`, `analysis`, `ml`, `application` (used to organize in docs site) - **owner**: email @@ -59,7 +61,7 @@ _Expected:_ - **depends_on**: list of refable nodes (`ref` + `source`) _Optional:_ -- **url** +- **url**: enables the link to **View this exposure** in the upper right corner of the generated documentation site - **maturity**: one of `high`, `medium`, `low` - **owner**: name @@ -82,7 +84,3 @@ When we generate our documentation site, you'll see the exposure appear: - -## Exposures are new! - -Exposures were introduced in dbt v0.18.1, with a limited set of supported types and properties. If you're interested in requesting or contributing additional properties, check out issue [dbt#2835](https://github.com/dbt-labs/dbt-core/issues/2835). diff --git a/website/docs/docs/building-a-dbt-project/hooks-operations.md b/website/docs/docs/build/hooks-operations.md similarity index 56% rename from website/docs/docs/building-a-dbt-project/hooks-operations.md rename to website/docs/docs/build/hooks-operations.md index 90a7efdfd41..524a75aaf9c 100644 --- a/website/docs/docs/building-a-dbt-project/hooks-operations.md +++ b/website/docs/docs/build/hooks-operations.md @@ -1,5 +1,5 @@ --- -title: "Hooks & Operations" +title: "Hooks and operations" id: "hooks-operations" --- @@ -8,16 +8,16 @@ id: "hooks-operations" * [on-run-start & on-run-end](on-run-start-on-run-end) * [`run-operation` command](run-operation) -## Assumed knowledge +### Assumed knowledge * [Project configurations](reference/dbt_project.yml.md) * [Model configurations](model-configs) * [Macros](jinja-macros#macros) -## Getting started +## Getting started with hooks and operations Effective database administration sometimes requires additional SQL statements to be run, for example: -- Granting privileges on an / view - Creating UDFs +- Managing row- or column-level permissions - Vacuuming tables on Redshift - Creating partitions in Redshift Spectrum external tables - Resuming/pausing/resizing warehouses in Snowflake @@ -25,18 +25,55 @@ Effective database administration sometimes requires additional SQL statements t - Create a share on Snowflake - Cloning a database on Snowflake -dbt provides two different interfaces for you to version control and execute these statements as part of your dbt project — hooks and operations. +dbt provides hooks and operations so you can version control and execute these statements as part of your dbt project. + +## About hooks -### Hooks Hooks are snippets of SQL that are executed at different times: * `pre-hook`: executed _before_ a model, seed or snapshot is built. * `post-hook`: executed _after_ a model, seed or snapshot is built. 
* `on-run-start`: executed at the _start_ of `dbt run`, `dbt seed` or `dbt snapshot` * `on-run-end`: executed at the _end_ of `dbt run`, `dbt seed` or `dbt snapshot` -Hooks are defined in your `dbt_project.yml` file. Pre- and post-hooks can also be defined in a `config` block. +Hooks are a more advanced capability that enables you to run custom SQL and leverage database-specific actions, beyond what dbt makes available out of the box with standard materializations and configurations. + + + + +If (and only if) you can't leverage the [`grants` resource-config](/reference/resource-configs/grants), you can use `post-hook` for more advanced workflows, such as when you: + +* Need to apply `grants` in a more complex way, which the dbt Core v1.2 `grants` config does not (yet) support. +* Need to perform post-processing that dbt does not support out of the box. For example, `analyze table`, `alter table set property`, `alter table ... add row access policy`, etc. + +### Examples using hooks + +You can use hooks to trigger actions at certain times when running an operation or building a model, seed, or snapshot. + +For more information about when hooks can be triggered, see reference sections for [`on-run-start` and `on-run-end` hooks](on-run-start-on-run-end) and [`pre-hook`s and `post-hook`s](pre-hook-post-hook). + +You can use hooks to provide database-specific functionality not available out of the box with dbt. For example, you can use a `config` block to run an `ALTER TABLE` statement right after building an individual model using a `post-hook`: + + + +```sql +{{ config( + post_hook=[ + "alter table {{ this }} ..." + ] +) }} +``` + + + + + + + + +### Examples using hooks -Here's a minimal example of using hooks to grant privileges. You can find more information in the reference sections for [`on-run-start` and `on-run-end` hooks](on-run-start-on-run-end) and [`pre-hook`s and `post-hook`s](pre-hook-post-hook). +Here's a minimal example of using hooks to grant privileges. For more information, see [`on-run-start` & `on-run-end` hooks](on-run-start-on-run-end) and [`pre-hook` & `post-hook`](pre-hook-post-hook) reference sections. @@ -69,13 +106,96 @@ select ... -:::tip Calling a macro in a hook +You should use database-specific syntax when appropriate: -You can also use a [macro](jinja-macros#macros) to bundle up hook logic. Check out some of the examples in the reference sections for [on-run-start and on-run-end hooks](on-run-start-on-run-end) and [pre- and post-hooks](pre-hook-post-hook), + -::: 
+ + + +```sql +{{ config( + post_hook=[ + 'grant `roles/bigquery.dataViewer` on {{ this.type }} {{ this }} to "user:someone@yourcompany.com"' + ] +) }} + +select ... + +``` + + + +
+ +
+ + + +```sql +{{ config( + post_hook=[ + "grant select on {{ this }} to `someone@yourcompany.com`" + ] +) }} + +select ... + +``` + + + +
+ +
+ + + +```sql +{{ config( + post_hook=[ + "grant select on {{ this }} to reporter" + ] +) }} + +select ... + +``` + + + +
+ +
+ + + +```sql +{{ config( + post_hook=[ + "grant select on {{ this }} to role reporter" + ] +) }} + +select ... + +``` + + + +
+ +
+ +
+ +### Calling a macro in a hook + +You can also use a [macro](jinja-macros#macros) to bundle up hook logic. Check out some of the examples in the reference sections for [on-run-start and on-run-end hooks](on-run-start-on-run-end) and [pre- and post-hooks](pre-hook-post-hook). + +## About operations -### Operations Operations are [macros](jinja-macros#macros) that you can run using the [`run-operation`](run-operation) command. As such, operations aren't actually a separate resource in your dbt project — they are just a convenient way to invoke a macro without needing to run a model. :::info Explicitly execute the SQL in an operation @@ -115,9 +235,10 @@ Full usage docs for the `run-operation` command can be found [here](run-oper ## Additional examples + These examples from the community highlight some of the use cases for hooks and operations! -* [In-depth discussion of granting privileges using hooks and operations](https://discourse.getdbt.com/t/the-exact-grant-statements-we-use-in-a-dbt-project/430) +* [In-depth discussion of granting privileges using hooks and operations, for dbt Core versions prior to 1.2](https://discourse.getdbt.com/t/the-exact-grant-statements-we-use-in-a-dbt-project/430) * [Staging external tables](https://github.com/dbt-labs/dbt-external-tables) * [Performing a zero copy clone on Snowflake to reset a dev environment](https://discourse.getdbt.com/t/creating-a-dev-environment-quickly-on-snowflake/1151/2) * [Running `vacuum` and `analyze` on a Redshift warehouse](https://github.com/dbt-labs/redshift/tree/0.2.3/#redshift_maintenance_operation-source) diff --git a/website/docs/docs/building-a-dbt-project/building-models/configuring-incremental-models.md b/website/docs/docs/build/incremental-models.md similarity index 95% rename from website/docs/docs/building-a-dbt-project/building-models/configuring-incremental-models.md rename to website/docs/docs/build/incremental-models.md index 2e1c47421d8..08911764e32 100644 --- a/website/docs/docs/building-a-dbt-project/building-models/configuring-incremental-models.md +++ b/website/docs/docs/build/incremental-models.md @@ -1,11 +1,11 @@ --- -title: "Configuring incremental models" -id: "configuring-incremental-models" +title: "Incremental models" +id: "incremental-models" --- -## About incremental models +## Overview -Incremental models are built as tables in your data warehouse. The first time a model is run, the table is built by transforming _all_ rows of source data. On subsequent runs, dbt transforms _only_ the rows in your source data that you tell dbt to filter for, inserting them into the target table which is the table that has already been built. +Incremental models are built as tables in your data warehouse. The first time a model is run, the table is built by transforming _all_ rows of source data. On subsequent runs, dbt transforms _only_ the rows in your source data that you tell dbt to filter for, inserting them into the target table (the table that has already been built). Often, the rows you filter for on an incremental run will be the rows in your source data that have been created or updated since the last time dbt ran. As such, on each dbt run, your model gets built incrementally. @@ -260,7 +260,7 @@ or: config( materialized='incremental', unique_key='date_day', - incremental_strategy='insert_overwrite', + incremental_strategy='delete+insert', ... ) }} @@ -299,3 +299,6 @@ select ... ```
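+ +To make the incremental filter concrete, a minimal incremental model might look like the following sketch (the `stg_events` ref and the `id`/`updated_at` columns are hypothetical): + +```sql +{{ config(materialized='incremental', unique_key='id') }} + +select * from {{ ref('stg_events') }} + +{% if is_incremental() %} + -- on incremental runs, only select rows created or updated + -- since the last time this model was built + where updated_at > (select max(updated_at) from {{ this }}) +{% endif %} +```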
+
+
+
diff --git a/website/docs/docs/building-a-dbt-project/jinja-macros.md b/website/docs/docs/build/jinja-macros.md
similarity index 90%
rename from website/docs/docs/building-a-dbt-project/jinja-macros.md
rename to website/docs/docs/build/jinja-macros.md
index 3457401409c..64ccd16d331 100644
--- a/website/docs/docs/building-a-dbt-project/jinja-macros.md
+++ b/website/docs/docs/build/jinja-macros.md
@@ -1,11 +1,11 @@
 ---
-title: "Jinja & Macros"
+title: "Jinja and macros"
 id: "jinja-macros"
 ---

 ## Related reference docs
 * [Jinja Template Designer Documentation](https://jinja.palletsprojects.com/page/templates/) (external link)
-* [dbt Jinja context](dbt-jinja-functions)
+* [dbt Jinja context](/reference/dbt-jinja-functions)
 * [Macro properties](macro-properties)

 ## Overview
@@ -22,8 +22,7 @@ Using Jinja turns your dbt project into a programming environment for SQL, givin

 In fact, if you've used the [`{{ ref() }}` function](ref), you're already using Jinja!

-Jinja can be used in any SQL in a dbt project, including [models](building-models), [analyses](analyses), [tests](building-a-dbt-project/tests), and even [hooks](hooks-operations).
-
+Jinja can be used in any SQL in a dbt project, including [models](/docs/build/sql-models), [analyses](analyses), [tests](/docs/build/tests), and even [hooks](hooks-operations).

 :::info Ready to get started with Jinja and macros?
@@ -126,7 +125,7 @@ from app_data.payments

 ### Using a macro from a package

-A number of useful macros have also been grouped together into [packages](package-management) — our most popular package is [dbt-utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/).
+A number of useful macros have also been grouped together into [packages](/docs/build/packages) — our most popular package is [dbt-utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/).

 After installing a package into your project, you can use any of the macros in your own project — make sure you qualify the macro by prefixing it with the [package name](project-configs/name):

@@ -148,13 +147,13 @@ You can also qualify a macro in your own project by prefixing it with your [pack

 ## FAQs

-
-
-
-
-
-
-
+
+
+
+
+
+
+

 ## dbtonic Jinja

@@ -184,3 +183,6 @@ Writing a macro for the first time? Check whether we've open sourced one in [dbt
     ...
 {% endfor %}
 ```
+
+
+
diff --git a/website/docs/docs/building-a-dbt-project/building-models/materializations.md b/website/docs/docs/build/materializations.md
similarity index 63%
rename from website/docs/docs/building-a-dbt-project/building-models/materializations.md
rename to website/docs/docs/build/materializations.md
index fb7f9ef307d..c89199edd83 100644
--- a/website/docs/docs/building-a-dbt-project/building-models/materializations.md
+++ b/website/docs/docs/build/materializations.md
@@ -4,7 +4,9 @@ id: "materializations"
 ---

 ## Overview
+
 Materializations are strategies for persisting dbt models in a warehouse. There are four types of materializations built into dbt. They are:
+
 - table
 - view
 - incremental
@@ -82,7 +84,7 @@ When using the `table` materialization, your model is rebuilt as a
 expression.
 * **Pros:**
   * You can still write reusable logic
-  * Ephemeral models can help keep your data warehouse clean by reducing clutter (also consider splitting your models across multiple schemas by [using custom schemas](using-custom-schemas)).
+  - Ephemeral models can help keep your data warehouse clean by reducing clutter (also consider splitting your models across multiple schemas by [using custom schemas](/docs/build/custom-schemas)).
* **Cons:**
  * You cannot select directly from this model.
  * Operations (e.g. macros called via `dbt run-operation`) cannot `ref()` ephemeral nodes
@@ -100,3 +102,83 @@ When using the `table` materialization, your model is rebuilt as a

```python
import snowflake.snowpark.functions as F

def model(dbt, session):
    dbt.config(materialized = "incremental")
    df = dbt.ref("upstream_table")

    if dbt.is_incremental:

        # only new rows compared to max in current table
        max_from_this = f"select max(updated_at) from {dbt.this}"
        df = df.filter(df.updated_at >= session.sql(max_from_this).collect()[0][0])

        # or only rows from the past 3 days
        df = df.filter(df.updated_at >= F.dateadd("day", F.lit(-3), F.current_timestamp()))

        ...

    return df
```

```python
import pyspark.sql.functions as F

def model(dbt, session):
    dbt.config(materialized = "incremental")
    df = dbt.ref("upstream_table")

    if dbt.is_incremental:

        # only new rows compared to max in current table
        max_from_this = f"select max(updated_at) from {dbt.this}"
        df = df.filter(df.updated_at >= session.sql(max_from_this).collect()[0][0])

        # or only rows from the past 3 days
        df = df.filter(df.updated_at >= F.date_add(F.current_timestamp(), F.lit(-3)))

        ...

    return df
```

**Note:** Incremental models are supported on BigQuery/Dataproc for the `merge` incremental strategy. The `insert_overwrite` strategy is not yet supported.

diff --git a/website/docs/docs/build/metrics.md b/website/docs/docs/build/metrics.md
new file mode 100644
index 00000000000..681aec63dca
--- /dev/null
+++ b/website/docs/docs/build/metrics.md
@@ -0,0 +1,498 @@
---
title: "Metrics"
id: "metrics"
description: "When you define metrics in dbt projects, you encode crucial business logic in tested, version-controlled code. The dbt metrics layer helps you standardize metrics within your organization."
keywords:
  - dbt metrics layer
---

* **v1.3.0**: Metrics have been moved out of the experimental phase
* **v1.0.0**: Metrics are new and experimental

## About Metrics

A metric is a timeseries aggregation over a table that supports zero or more dimensions. Some examples of metrics include:
- active users
- monthly recurring revenue (mrr)

In v1.0, dbt supports metric definitions as a new node type. Like [exposures](exposures), metrics appear as nodes in the directed acyclic graph (DAG) and can be expressed in YAML files. Defining metrics in dbt projects encodes crucial business logic in tested, version-controlled code. Further, you can expose these metrics definitions to downstream tooling, which drives consistency and precision in metric reporting.

Review the video below to learn more about metrics, why they're important, and how to get started:

### Benefits of defining metrics

**Use metric specifications in downstream tools**
dbt's compilation context can access metrics via the [`graph.metrics` variable](graph). The [manifest artifact](manifest-json) includes metrics for downstream metadata consumption.

**See and select dependencies**
As with Exposures, you can see everything that rolls up into a metric (`dbt ls -s +metric:*`), and visualize them in [dbt documentation](documentation). For more information, see "[The `metric:` selection method](node-selection/methods#the-metric-method)."

## Defining a metric

You can define metrics in `.yml` files nested under a `metrics:` key. Metric names must:
- contain only letters, numbers, and underscores (no spaces or special characters)
- begin with a letter
- contain no more than 250 characters

For a short human-friendly name with title casing, spaces, and special characters, use the `label` property. More examples and guidance for how to define and structure metrics [can be found here](https://docs.getdbt.com/blog/how-to-design-and-structure-metrics).

### Example definition

```yaml
# models/marts/product/schema.yml

version: 2

models:
  - name: dim_customers
    ...

metrics:
  - name: rolling_new_customers
    label: New Customers
    model: ref('dim_customers')
    [description](description): "The 14 day rolling count of paying customers using the product"

    calculation_method: count_distinct
    expression: user_id

    timestamp: signup_date
    time_grains: [day, week, month, quarter, year, all_time]

    dimensions:
      - plan
      - country

    window:
      count: 14
      period: day

    filters:
      - field: is_paying
        operator: 'is'
        value: 'true'
      - field: lifetime_value
        operator: '>='
        value: '100'
      - field: company_name
        operator: '!='
        value: "'Acme, Inc'"
      - field: signup_date
        operator: '>='
        value: "'2020-01-01'"

    # general properties
    [config](resource-properties/config):
      enabled: true | false
      treat_null_values_as_zero: true | false

    [meta](resource-configs/meta): {team: Finance}
```

```yaml
# models/marts/product/schema.yml

version: 2

models:
  - name: dim_customers
    ...

metrics:
  - name: rolling_new_customers
    label: New Customers
    model: ref('dim_customers')
    description: "The 14 day rolling count of paying customers using the product"

    type: count_distinct
    sql: user_id

    timestamp: signup_date
    time_grains: [day, week, month, quarter, year, all_time]

    dimensions:
      - plan
      - country

    filters:
      - field: is_paying
        operator: 'is'
        value: 'true'
      - field: lifetime_value
        operator: '>='
        value: '100'
      - field: company_name
        operator: '!='
        value: "'Acme, Inc'"
      - field: signup_date
        operator: '>='
        value: "'2020-01-01'"

    meta: {team: Finance}
```

:::caution

- You cannot define metrics on [ephemeral models](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/materializations#ephemeral). To define a metric, the materialization must have a representation in the data warehouse.

:::

### Available properties
Metrics can have many declared **properties**, which define aspects of your metric. More information on [properties and configs can be found here](https://docs.getdbt.com/reference/configs-and-properties).

| Field       | Description                                                 | Example                         | Required? |
|-------------|-------------------------------------------------------------|---------------------------------|-----------|
| name        | A unique identifier for the metric | new_customers | yes |
| model       | The dbt model that powers this metric | dim_customers | yes (no for `derived` metrics) |
| label       | A short name / label for the metric | New Customers | yes |
| description | Long form, human-readable description for the metric | The number of customers who.... | no |
| calculation_method | The method of calculation (aggregation or derived) that is applied to the expression | count_distinct | yes |
| expression  | The expression to aggregate/calculate over | user_id, cast(user_id as int) | yes |
| timestamp   | The time-based component of the metric | signup_date | yes |
| time_grains | One or more "grains" at which the metric can be evaluated. For more information, see the "Custom Calendar" section. | [day, week, month, quarter, year] | yes |
| dimensions  | A list of dimensions to group or filter the metric by | [plan, country] | no |
| window      | A dictionary for aggregating over a window of time. Used for rolling metrics such as 14 day rolling average. Acceptable periods are: [`day`,`week`,`month`, `year`, `all_time`] | {count: 14, period: day} | no |
| filters     | A list of filters to apply before calculating the metric | See below | no |
| config      | [Optional configurations](https://github.com/dbt-labs/dbt_metrics#accepted-metric-configurations) for calculating this metric | {treat_null_values_as_zero: true} | no |
| meta        | Arbitrary key/value store | {team: Finance} | no |

| Field       | Description                                                 | Example                         | Required? |
|-------------|-------------------------------------------------------------|---------------------------------|-----------|
| name        | A unique identifier for the metric | new_customers | yes |
| model       | The dbt model that powers this metric | dim_customers | yes (no for `derived` metrics) |
| label       | A short name / label for the metric | New Customers | yes |
| description | Long form, human-readable description for the metric | The number of customers who.... | no |
| type        | The method of calculation (aggregation or derived) that is applied to the expression | count_distinct | yes |
| sql         | The expression to aggregate/calculate over | user_id, cast(user_id as int) | yes |
| timestamp   | The time-based component of the metric | signup_date | yes |
| time_grains | One or more "grains" at which the metric can be evaluated | [day, week, month, quarter, year, all_time] | yes |
| dimensions  | A list of dimensions to group or filter the metric by | [plan, country] | no |
| filters     | A list of filters to apply before calculating the metric | See below | no |
| meta        | Arbitrary key/value store | {team: Finance} | no |

### Available calculation methods

The method of calculation (aggregation or derived) that is applied to the expression.

The type of calculation (aggregation or expression) that is applied to the sql property (v1.2).

| Metric calculation method (metric type in v1.2) | Description |
|----------------|----------------------------------------------------------------------------|
| count          | This metric type will apply the `count` aggregation to the specified field |
| count_distinct | This metric type will apply the `count` aggregation to the specified field, with an additional distinct statement inside the aggregation |
| sum            | This metric type will apply the `sum` aggregation to the specified field |
| average        | This metric type will apply the `average` aggregation to the specified field |
| min            | This metric type will apply the `min` aggregation to the specified field |
| max            | This metric type will apply the `max` aggregation to the specified field |
| derived (expression in v1.2) | This metric type is defined as any _non-aggregating_ calculation of 1 or more metrics |

### Derived Metrics
In v1.2, support was added for `derived` metrics (previously named `expression`), which are defined as non-aggregating calculations of 1 or more metrics. An example of this would be `{{metric('total_revenue')}} / {{metric('count_of_customers')}}`.

By defining these metrics, you are able to create metrics like:
- ratios
- subtractions
- any arbitrary calculation

As long as the two (or more) base metrics (metrics that comprise the `derived` metric) share the specified `time_grains` and `dimensions`, those attributes can be used in any downstream metrics macro.

An example definition of a `derived` metric is:

```yaml
# models/marts/product/schema.yml
version: 2

models:
  - name: dim_customers
    ...

metrics:
  - name: average_revenue_per_customer
    label: Average Revenue Per Customer
    description: "The average revenue received per customer"

    calculation_method: derived
    expression: "{{metric('total_revenue')}} / {{metric('count_of_customers')}}"

    timestamp: order_date
    time_grains: [day, week, month, quarter, year, all_time]
    dimensions:
      - had_discount
      - order_country

```

### Expression Metrics
In v1.2, support was added for `expression` metrics, which are defined as non-aggregating calculations of 1 or more metrics. By defining these metrics, you are able to create metrics like:
- ratios
- subtractions
- any arbitrary calculation

As long as the two (or more) base metrics (the metrics that comprise the `expression` metric) share the specified `time_grains` and `dimensions`, those attributes can be used in any downstream metrics macro.

An example definition of an `expression` metric is:

```yaml
# models/marts/product/schema.yml
version: 2

models:
  - name: dim_customers
    ...

metrics:
  - name: average_revenue_per_customer
    label: Average Revenue Per Customer
    description: "The average revenue received per customer"

    type: expression
    sql: "{{metric('total_revenue')}} / {{metric('count_of_customers')}}"

    timestamp: order_date
    time_grains: [day, week, month, quarter, year, all_time]
    dimensions:
      - had_discount
      - order_country

```

### Filters
Filters should be defined as a list of dictionaries that define predicates for the metric. Filters are combined using AND clauses. For more control, users can (and should) include the complex logic in the model powering the metric.

All three properties (`field`, `operator`, `value`) are required for each defined filter.

Note that `value` must be defined as a string in YAML, because it will be compiled into queries as part of a string. If your filter's value needs to be surrounded in quotes inside the query (e.g. text or dates), use `"'nested'"` quotes:

```yml
  filters:
    - field: is_paying
      operator: 'is'
      value: 'true'
    - field: lifetime_value
      operator: '>='
      value: '100'
    - field: company_name
      operator: '!='
      value: "'Acme, Inc'"
    - field: signup_date
      operator: '>='
      value: "'2020-01-01'"
```

## Querying Your Metric
You can dynamically query metrics directly in dbt and verify them before running a job in the deployment environment. To query your defined metric, you must have the [dbt_metrics package](https://github.com/dbt-labs/dbt_metrics) installed. Information on how to install packages [can be found here](https://docs.getdbt.com/docs/building-a-dbt-project/package-management#how-do-i-add-a-package-to-my-project).

Use the following [metrics package](https://hub.getdbt.com/dbt-labs/metrics/latest/) installation code in your packages.yml file and run `dbt deps` to install the metrics package:

```yml
packages:
  - package: dbt-labs/metrics
    version: [">=1.3.0", "<1.4.0"]
```

```yml
packages:
  - package: dbt-labs/metrics
    version: [">=0.3.0", "<0.4.0"]
```

```yml
packages:
  - package: dbt-labs/metrics
    version: [">=0.2.0", "<0.3.0"]
```

Once the package has been installed with `dbt deps`, make sure to run the `dbt_metrics_calendar_model` model, as it is required by the macros used to query metrics. More information on this, and additional calendar functionality, can be found in the [project README](https://github.com/dbt-labs/dbt_metrics#calendar).
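For example (assuming one of the package versions above is pinned in your `packages.yml`), the install-and-prepare steps might look like this sketch:

```shell
dbt deps                                      # install the dbt_metrics package
dbt run --select dbt_metrics_calendar_model   # build the calendar model required by the metrics macros
```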

### Querying metrics with `metrics.calculate`
Use the `metrics.calculate` macro along with defined metrics to generate a SQL statement that runs the metric aggregation to return the correct metric dataset. Example below:

```sql
select *
from {{ metrics.calculate(
    metric('new_customers'),
    grain='week',
    dimensions=['plan', 'country']
) }}
```

```sql
select *
from {{ metrics.calculate(
    metric_name='new_customers',
    grain='week',
    dimensions=['plan', 'country']
) }}
```

### Supported inputs
The example above doesn't display all the potential inputs you can provide to the macro.

You may find some pieces of functionality, like secondary calculations, complicated to use. We recommend reviewing the [package README](https://github.com/dbt-labs/dbt_metrics) for more in-depth information about each of the inputs that are not covered in the table below.

| Input | Example | Description | Required |
| ----------- | ----------- | ----------- | -----------|
| metric_list (metric_name in v1.2) | `metric('some_metric')`, <br/>[`metric('some_metric')`, <br/>`metric('some_other_metric')`] <br/>(v1.2: `'metric_name'`) | The metric(s) to be queried by the macro. If multiple metrics are required, provide them in list format (in v1.2, pass the name of a single metric). | Required |
| grain | `'day'`, `'week'`, <br/>`'month'`, `'quarter'`, <br/>`'year'`, `'all_time'` | The time grain that the metric will be aggregated to in the returned dataset | Required |
| dimensions | [`'plan'`, <br/>`'country'`] | The dimensions you want the metric to be aggregated by in the returned dataset | Optional |
| secondary_calculations | [`metrics.period_over_period( comparison_strategy="ratio", interval=1, alias="pop_1wk")`] | Performs the specified secondary calculation on the metric results. Examples include period over period calculations, rolling calculations, and period to date calculations. | Optional |
| start_date | `'2022-01-01'` | Limits the date range of data used in the metric calculation by not querying data before this date | Optional |
| end_date | `'2022-12-31'` | Limits the date range of data used in the metric calculation by not querying data after this date | Optional |
| where | `plan='paying_customer'` | A SQL statement, or series of SQL statements, that alters the **final** CTE in the generated SQL. Most often used to limit the data to specific values of dimensions provided | Optional |

#### Secondary Calculations
Secondary calculations are window functions you can add to the metric calculation and perform on the primary metric or metrics.

You can use them to compare values to an earlier period, calculate year-to-date sums, and return rolling averages. You can add custom secondary calculations into dbt projects - for more information on this, reference the [package README](https://github.com/dbt-labs/dbt_metrics#secondary-calculations).

### Developing metrics with `metrics.develop`

There may be times you want to test what a metric might look like before defining it in your project. In these cases, use the `develop` macro, which allows you to provide metric(s) in a contained yml so you can simulate what a defined metric might look like in your project.

```sql
{% set my_metric_yml -%}
{% raw %}

metrics:
  # The name of the metric does not need to be develop_metric
  - name: develop_metric
    model: ref('fact_orders')
    label: Total Discount ($)
    timestamp: order_date
    time_grains: [day, week, month, quarter, year, all_time]
    calculation_method: average
    expression: discount_total
    dimensions:
      - had_discount
      - order_country

{% endraw %}
{%- endset %}

select *
from {{ metrics.develop(
        develop_yml=my_metric_yml,
        metric_list=['develop_metric'],
        grain='month'
        )
    }}
```

**Important caveat** - The metric list input for the `metrics.develop` macro takes in the metric names themselves, not the `metric('name')` statement that the `calculate` macro uses. Using the example above:

- ✅ `['develop_metric']`
- ❌ `[metric('develop_metric')]`

There may be times you want to test what a metric might look like before defining it in your project. In these cases, use the `develop` macro, which allows you to provide a single metric in a contained yml so you can simulate what a defined metric might look like in your project.

```sql
{% set my_metric_yml -%}
{% raw %}

metrics:
  - name: develop_metric
    model: ref('fact_orders')
    label: Total Discount ($)
    timestamp: order_date
    time_grains: [day, week, month, quarter, year, all_time]
    type: average
    sql: discount_total
    dimensions:
      - had_discount
      - order_country

{% endraw %}
{%- endset %}

select *
from {{ metrics.develop(
        develop_yml=my_metric_yml,
        grain='month'
        )
    }}
```
+ + + + + + diff --git a/website/docs/docs/build/models.md b/website/docs/docs/build/models.md new file mode 100644 index 00000000000..029e2856abb --- /dev/null +++ b/website/docs/docs/build/models.md @@ -0,0 +1,20 @@ +--- +title: "About dbt models" +id: "models" +--- + +## Overview + +dbt Core and Cloud are composed of different moving parts working harmoniously. All of them are important to what dbt does — transforming data—the 'T' in ELT. When you execute `dbt run`, you are running a model that will transform your data without that data ever leaving your warehouse. + +Models are where your developers spend most of their time within a dbt environment. Models are primarily written as a `select` statement and saved as a `.sql` file. While the definition is straightforward, the complexity of the execution will vary from environment to environment. Models will be written and rewritten as needs evolve and your organization finds new ways to maximize efficiency. + +SQL is the language most dbt users will utilize, but it is not the only one for building models. Starting in version 1.3, dbt Core and dbt Cloud support Python models. Python models are useful for training or deploying data science models, complex transformations, or where a specific Python package meets a need — such as using the `dateutil` library to parse dates. + +### Models and modern workflows + +The top level of a dbt workflow is the project. A project is a directory of a `.yml` file (the project configuration) and either `.sql` or `.py` files (the models). The project file tells dbt the project context, and the models let dbt know how to build a specific data set. For more details on projects, refer to [About dbt projects](/docs/build/projects). + +Your organization may need only a few models, but more likely you’ll need a complex structure of nested models to transform the required data. A model is a single file containing a final `select` statement, and a project can have multiple models, and models can even reference each other. Add to that, numerous projects and the level of effort required for transforming complex data sets can improve drastically compared to older methods. + +Learn more about models in [SQL models](/docs/build/sql-models) and [Python models](/docs/build/python-models) pages. If you'd like to begin with a bit of practice, visit our [Getting Started Guide](/docs/get-started/getting-started/overview) for instructions on setting up the Jaffle_Shop sample data so you can get hands-on with the power of dbt. diff --git a/website/docs/docs/building-a-dbt-project/package-management.md b/website/docs/docs/build/packages.md similarity index 94% rename from website/docs/docs/building-a-dbt-project/package-management.md rename to website/docs/docs/build/packages.md index 769d2eb77c5..3a77ce310b4 100644 --- a/website/docs/docs/building-a-dbt-project/package-management.md +++ b/website/docs/docs/build/packages.md @@ -1,6 +1,6 @@ --- title: "Packages" -id: "package-management" +id: "packages" --- ## What is a package? 
@@ -35,7 +35,7 @@ packages: version: 0.7.0 - git: "https://github.com/dbt-labs/dbt-utils.git" - revision: 0.1.21 + revision: 0.9.2 - local: /opt/dbt/redshift ``` @@ -119,7 +119,7 @@ Packages stored on a Git server can be installed using the `git` syntax, like so ```yaml packages: - git: "https://github.com/dbt-labs/dbt-utils.git" # git URL - revision: 0.1.21 # tag or branch name + revision: 0.9.2 # tag or branch name ``` @@ -145,8 +145,10 @@ As of v0.14.0, dbt will warn you if you install a package using the `git` syntax ### Private packages -#### SSH Key Method -Private packages can be cloned via SSH and an SSH key. When you use SSH keys to authenticate to your git remote server, you don’t need to supply your username and password each time. Read more about SSH keys, how to generate them, and how to add them to your git provider here: [Github](https://docs.github.com/en/github/authenticating-to-github/connecting-to-github-with-ssh) and [GitLab](https://docs.gitlab.com/ee/ssh/). +#### SSH Key Method (Command Line only) +If you're using the Command Line, private packages can be cloned via SSH and an SSH key. + +When you use SSH keys to authenticate to your git remote server, you don’t need to supply your username and password each time. Read more about SSH keys, how to generate them, and how to add them to your git provider here: [Github](https://docs.github.com/en/github/authenticating-to-github/connecting-to-github-with-ssh) and [GitLab](https://docs.gitlab.com/ee/ssh/). @@ -158,6 +160,9 @@ packages: +If you're using dbt Cloud, the SSH key method will not work, but you can use the [HTTPS Git Token Method](https://docs.getdbt.com/docs/building-a-dbt-project/package-management#git-token-method). + + #### Git Token Method This method allows the user to clone via HTTPS by passing in a git token via an environment variable. Be careful of the expiration date of any token you use, as an expired token could cause a scheduled run to fail. Additionally, user tokens can create a challenge if the user ever loses access to a specific repo. diff --git a/website/docs/docs/building-a-dbt-project/building-models/using-variables.md b/website/docs/docs/build/project-variables.md similarity index 88% rename from website/docs/docs/building-a-dbt-project/building-models/using-variables.md rename to website/docs/docs/build/project-variables.md index 3a443a57a55..04d713756d7 100644 --- a/website/docs/docs/building-a-dbt-project/building-models/using-variables.md +++ b/website/docs/docs/build/project-variables.md @@ -1,14 +1,14 @@ --- -title: "Using variables" -id: "using-variables" +title: "Project variables" +id: "project-variables" --- -dbt provides a mechanism, [variables](dbt-jinja-functions/var), to provide data to models for +dbt provides a mechanism, [variables](reference/dbt-jinja-functions/var), to provide data to models for compilation. Variables can be used to [configure timezones](https://github.com/dbt-labs/snowplow/blob/0.3.9/dbt_project.yml#L22), [avoid hardcoding table names](https://github.com/dbt-labs/quickbooks/blob/v0.1.0/dbt_project.yml#L23) or otherwise provide data to models to configure how they are compiled. -To use a variable in a model, hook, or macro, use the `{{ var('...') }}` function. More information on the `var` function can be found [here](dbt-jinja-functions/var). +To use a variable in a model, hook, or macro, use the `{{ var('...') }}` function. More information on the `var` function can be found [here](/reference/dbt-jinja-functions/var). 
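As a quick sketch of the pattern described above (the variable name, default, and model are illustrative), a variable can be declared in `dbt_project.yml` and then read with `var` in a model:

```yml
# dbt_project.yml (illustrative)
vars:
  event_cutoff_date: '2020-01-01'
```

```sql
-- models/filtered_events.sql (illustrative)
select *
from {{ ref('stg_events') }}
where event_date >= '{{ var("event_cutoff_date") }}'
```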
Variables can be defined in two ways:
@@ -20,7 +20,7 @@ Variables can be defined in two ways:

 :::info New in v0.17.0
 The syntax for specifying vars in the `dbt_project.yml` file has changed in
-dbt v0.17.0. See the [migration guide](migration-guide/upgrading-to-0-17-0)
+dbt v0.17.0. See the [migration guide](/guides/migration/versions)
 for more information on these changes.
 :::

@@ -67,7 +67,7 @@ $ dbt run --vars '{"key": "value"}'
 ```

 The `--vars` argument accepts a YAML dictionary as a string on the command line.
-YAML is convenient because it does not require strict quoting as with JSON.
+YAML is convenient because it does not require strict quoting as with JSON.

 Both of the following are valid and equivalent:

@@ -98,3 +98,6 @@ The order of precedence for variable declaration is as follows (highest priority

 4. The variable's default argument (if one is provided).

 If dbt is unable to find a definition for a variable after checking these four places, then a compilation error will be raised.
+
+
+
diff --git a/website/docs/docs/build/projects.md b/website/docs/docs/build/projects.md
new file mode 100644
index 00000000000..8c6f11e46c2
--- /dev/null
+++ b/website/docs/docs/build/projects.md
@@ -0,0 +1,72 @@
---
title: "About dbt projects"
id: "projects"
---

A dbt project informs dbt about the context of your project and how to transform your data (build your data sets). By design, dbt enforces the top-level structure of a dbt project such as the `dbt_project.yml` file, the `models` directory, the `snapshots` directory, and so on. Within those top-level directories, you can organize your project in any way that meets the needs of your organization and data pipeline.

At a minimum, all a project needs is the `dbt_project.yml` project configuration file. dbt supports a number of different resources, so a project may also include:

| Resource  | Description |
| :--- | :--- |
| [models](/docs/build/models) | Each model lives in a single file and contains logic that either transforms raw data into a dataset that is ready for analytics or, more often, is an intermediate step in such a transformation. |
| [snapshots](/docs/build/snapshots) | A way to capture the state of your mutable tables so you can refer to it later. |
| [seeds](/docs/build/seeds) | CSV files with static data that you can load into your data platform with dbt. |
| [tests](/docs/build/tests) | SQL queries that you can write to test the models and resources in your project. |
| [macros](/docs/build/jinja-macros) | Blocks of code that you can reuse multiple times. |
| [docs](/docs/collaborate/documentation) | Docs for your project that you can build. |
| [sources](/docs/build/sources) | A way to name and describe the data loaded into your warehouse by your Extract and Load tools. |
| [exposures](/docs/build/exposures) | A way to define and describe a downstream use of your project. |
| [metrics](/docs/build/metrics) | A way for you to define metrics for your project. |
| [analysis](/docs/build/analyses) | A way to organize analytical SQL queries in your project such as the general ledger from your QuickBooks.
| + +When building out the structure of your project, you should consider these impacts to your organization's workflow: + +* **How would people run dbt commands** — Selecting a path +* **How would people navigate within the project** — Whether as developers in the IDE or stakeholders from the docs +* **How would people configure the models** — Some bulk configurations are easier done at the directory level so people don’t have to remember to do everything in a config block with each new model + +## Project configuration +Every dbt project includes a project configuration file called `dbt_project.yml`. It defines the directory of the dbt project and other project configurations. + +Edit `dbt_project.yml` to set up common project configurations such as: + +

| YAML key | Value description |
| :--- | :--- |
| [name](/reference/project-configs/name) | Your project’s name in [snake case](https://en.wikipedia.org/wiki/Snake_case) |
| [version](/reference/project-configs/version) | Version of your project |
| [require-dbt-version](/reference/project-configs/require-dbt-version) | Restrict your project to only work with a range of [dbt Core versions](/docs/dbt-versions/core) |
| [profile](/reference/project-configs/profile) | The profile dbt uses to connect to your data platform |
| [model-paths](/reference/project-configs/model-paths) | Directories where your model and source files live |
| [seed-paths](/reference/project-configs/seed-paths) | Directories where your seed files live |
| [test-paths](/reference/project-configs/test-paths) | Directories where your test files live |
| [analysis-paths](/reference/project-configs/analysis-paths) | Directories where your analyses live |
| [macro-paths](/reference/project-configs/macro-paths) | Directories where your macros live |
| [snapshot-paths](/reference/project-configs/snapshot-paths) | Directories where your snapshots live |
| [docs-paths](/reference/project-configs/docs-paths) | Directories where your docs blocks live |
| [vars](/docs/build/project-variables) | Project variables you want to use for data compilation |
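Taken together, a minimal `dbt_project.yml` touching several of these keys might look like the following sketch (all values are illustrative):

```yml
name: jaffle_shop
version: '1.0.0'
require-dbt-version: ['>=1.0.0', '<2.0.0']
profile: jaffle_shop

model-paths: ['models']
seed-paths: ['seeds']
test-paths: ['tests']

vars:
  event_cutoff_date: '2020-01-01'
```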

For complete details on project configurations, see [dbt_project.yml](/reference/dbt_project.yml).

## New projects

You can create new projects and [share them](/docs/collaborate/git-version-control) with other people by making them available on a hosted git repository like GitHub, GitLab, and BitBucket.

After you set up a connection with your data platform, you can [initialize your new project in dbt Cloud](/docs/get-started/getting-started/set-up-dbt-cloud) and start developing. Or, run [dbt init from the command line](/reference/commands/init) to set up your new project.

During project initialization, dbt creates sample model files in your project directory to help you start developing quickly.

## Sample projects

If you want to explore dbt projects more in-depth, you can clone dbt Labs’ [Jaffle shop](https://github.com/dbt-labs/jaffle_shop) on GitHub. It's a runnable project that contains sample configurations and helpful notes.

If you want to see what a mature, production project looks like, check out the [GitLab Data Team public repo](https://gitlab.com/gitlab-data/analytics/-/tree/master/transform/snowflake-dbt).

## Related docs
- [Best practices: How we structure our dbt projects](/guides/best-practices/how-we-structure/1-guide-overview)
- [Get started with dbt Cloud](/docs/get-started/getting-started/set-up-dbt-cloud)
- [Get started with dbt Core](/docs/get-started/getting-started-dbt-core)
diff --git a/website/docs/docs/build/python-models.md b/website/docs/docs/build/python-models.md
new file mode 100644
index 00000000000..3f213f1fdca
--- /dev/null
+++ b/website/docs/docs/build/python-models.md
@@ -0,0 +1,648 @@
---
title: "Python models"
id: "python-models"
---
- [Overview](#overview)
- [Configuring Python Models](#configuring-python-models)
- [Python Specific Functionality](#python-specific-functionality)
- [Limitations](#limitations)
- [Supported data platforms](#supported-data-platforms)

dbt Core v1.3 adds support for Python models. Note that only [specific data platforms](#supported-data-platforms) support dbt-py models.

We encourage you to:
- Read [the original discussion](https://github.com/dbt-labs/dbt-core/discussions/5261) that proposed this feature.
- Contribute to [best practices for developing Python models in dbt](https://github.com/dbt-labs/docs.getdbt.com/discussions/1811).
- Weigh in on [next steps for Python models, beyond v1.3](https://github.com/dbt-labs/dbt-core/discussions/5742).
- Join the **#beta-feedback-python-models** channel in the [dbt Community Slack](https://www.getdbt.com/community/join-the-community/).

In the following article, you'll see a section titled "❓ **dbt questions**." We are excited to release the first set of functionality in v1.3, which will solve real use cases. We also know this is the first step toward a much wider field of possibility. We don't pretend to have all the answers.

We're excited to keep developing our opinionated recommendations and next steps for product development, and we want your help. Comment in the GitHub discussions; leave thoughts in Slack; bring up dbt + Python in casual conversation with colleagues and friends.

## Overview

dbt Python ("dbt-py") models will help you solve use cases that can't be solved with SQL. You can perform analyses using tools available in the open-source Python ecosystem, including state-of-the-art packages for data science and statistics.
Before, you would have needed separate infrastructure and orchestration to run Python transformations in production. Python transformations defined in dbt are models in your project with all the same capabilities around testing, documentation, and lineage. + + + +Python models are supported in dbt Core 1.3 and higher. Learn more about [upgrading your version in dbt Cloud](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions) and [upgrading dbt Core versions](https://docs.getdbt.com/docs/core-versions#upgrading-to-new-patch-versions). + +To read more about Python models, change the [docs version to 1.3](/docs/build/python-models?version=1.3) (or higher) in the menu bar. + + + + + + + + +```python +import ... + +def model(dbt, session): + + my_sql_model_df = dbt.ref("my_sql_model") + + final_df = ... # stuff you can't write in SQL! + + return final_df +``` + + + + + +```yml +version: 2 + +models: + - name: my_python_model + + # Document within the same codebase + description: My transformation written in Python + + # Configure in ways that feel intuitive and familiar + config: + materialized: table + tags: ['python'] + + # Test the results of my Python transformation + columns: + - name: id + # Standard validation for 'grain' of Python results + tests: + - unique + - not_null + tests: + # Write your own validation logic (in SQL) for Python results + - [custom_generic_test](writing-custom-generic-tests) +``` + + + + + + +The prerequisites for dbt Python models include using an adapter for a data platform that supports a fully featured Python runtime. In a dbt Python model, all Python code is executed remotely on the platform. None of it is run by dbt locally. We believe in clearly separating _model definition_ from _model execution_. In this and many other ways, you'll find that dbt's approach to Python models mirrors its longstanding approach to modeling data in SQL. + +We've written this guide assuming that you have some familiarity with dbt. If you've never before written a dbt model, we encourage you to start by first reading [dbt Models](building-models). Throughout, we'll be drawing connections between Python models and SQL models, as well as making clear their differences. + +### What is a Python model? + +A dbt Python model is a function that reads in dbt sources or other models, applies a series of transformations, and returns a transformed dataset. DataFrame operations define the starting points, the end state, and each step along the way. + +This is similar to the role of CTEs in dbt SQL models. We use CTEs to pull in upstream datasets, define (and name) a series of meaningful transformations, and end with a final `select` statement. You can run the compiled version of a dbt SQL model to see the data included in the resulting view or table. When you `dbt run`, dbt wraps that query in `create view`, `create table`, or more complex DDL to save its results in the database. + +Instead of a final `select` statement, each Python model returns a final DataFrame. Each DataFrame operation is "lazily evaluated." In development, you can preview its data, using methods like `.show()` or `.head()`. When you run a Python model, the full result of the final DataFrame will be saved as a table in your data warehouse. + +dbt Python models have access to almost all of the same configuration options as SQL models. You can test them, document them, add `tags` and `meta` properties to them, grant access to their results to other users, and so on. 
You can select them by their name, their file path, configurations, whether they are upstream or downstream of another model, or if they have been modified compared to a previous project state. + +### Defining a Python model + +Each Python model lives in a `.py` file in your `models/` folder. It defines a function named **`model()`**, which takes two parameters: +- **`dbt`**: A class compiled by dbt Core, unique to each model, enables you to run your Python code in the context of your dbt project and DAG. +- **`session`**: A class representing your data platform’s connection to the Python backend. The session is needed to read in tables as DataFrames, and to write DataFrames back to tables. In PySpark, by convention, the `SparkSession` is named `spark`, and available globally. For consistency across platforms, we always pass it into the `model` function as an explicit argument called `session`. + +The `model()` function must return a single DataFrame. On Snowpark (Snowflake), this can be a Snowpark or pandas DataFrame. Via PySpark (Databricks + BigQuery), this can be a Spark, pandas, or pandas-on-Spark DataFrame. For more about choosing between pandas and native DataFrames, see [DataFrame API + syntax](#dataframe-api--syntax). + +When you `dbt run --select python_model`, dbt will prepare and pass in both arguments (`dbt` and `session`). All you have to do is define the function. This is how every single Python model should look: + + + +```python +def model(dbt, session): + + ... + + return final_df +``` + + + + +### Referencing other models + +Python models participate fully in dbt's directed acyclic graph (DAG) of transformations. Use the `dbt.ref()` method within a Python model to read data from other models (SQL or Python). If you want to read directly from a raw source table, use `dbt.source()`. These methods return DataFrames pointing to the upstream source, model, seed, or snapshot. + + + +```python +def model(dbt, session): + + # DataFrame representing an upstream model + upstream_model = dbt.ref("upstream_model_name") + + # DataFrame representing an upstream source + upstream_source = dbt.source("upstream_source_name", "table_name") + + ... +``` + + + +Of course, you can `ref()` your Python model in downstream SQL models, too: + + + +```sql +with upstream_python_model as ( + + select * from {{ ref('my_python_model') }} + +), + +... +``` + + + +## Configuring Python models + +Just like SQL models, there are three ways to configure Python models: +1. In `dbt_project.yml`, where you can configure many models at once +2. In a dedicated `.yml` file, within the `models/` directory +3. Within the model's `.py` file, using the `dbt.config()` method + +Calling the `dbt.config()` method will set configurations for your model within your `.py` file, similar to the `{{ config() }}` macro in `.sql` model files: + + + +```python +def model(dbt, session): + + # setting configuration + dbt.config(materialized="table") +``` + + + +There's a limit to how complex you can get with the `dbt.config()` method. It accepts _only_ literal values (strings, booleans, and numeric types). Passing another function or a more complex data structure is not possible. The reason is that dbt statically analyzes the arguments to `config()` while parsing your model without executing your Python code. If you need to set a more complex configuration, we recommend you define it using the [`config` property](resource-properties/config) in a YAML file. 
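For instance, a configuration too complex for `dbt.config()` could be declared in a dedicated `.yml` file instead (a sketch; the model name and values are illustrative):

```yml
version: 2

models:
  - name: my_python_model
    config:
      materialized: table
      tags: ['python']
      meta:
        owner: 'data-science'
```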

#### Accessing project context

dbt Python models don't use Jinja to render compiled code. Python models have limited access to global project contexts compared to SQL models. That context is made available from the `dbt` class, passed in as an argument to the `model()` function.

Out of the box, the `dbt` class supports:
- Returning DataFrames referencing the locations of other resources: `dbt.ref()` + `dbt.source()`
- Accessing the database location of the current model: `dbt.this()` (also: `dbt.this.database`, `.schema`, `.identifier`)
- Determining if the current model's run is incremental: `dbt.is_incremental`

It is possible to extend this context by "getting" additional values via `dbt.config.get()` after they are configured in the [model's config](/reference/model-configs). This includes inputs such as `var`, `env_var`, and `target`. If you want to use those values to power conditional logic in your model, we require setting them through a dedicated `.yml` file config:

```yml
version: 2

models:
  - name: my_python_model
    config:
      materialized: table
      target_name: "{{ target.name }}"
      specific_var: "{{ var('SPECIFIC_VAR') }}"
      specific_env_var: "{{ env_var('SPECIFIC_ENV_VAR') }}"
```

Then, within the model's Python code, use the `dbt.config.get()` function to _access_ values of configurations that have been set:

```python
def model(dbt, session):
    target_name = dbt.config.get("target_name")
    specific_var = dbt.config.get("specific_var")
    specific_env_var = dbt.config.get("specific_env_var")

    orders_df = dbt.ref("fct_orders")

    # limit data in dev
    if target_name == "dev":
        orders_df = orders_df.limit(500)
```

### Materializations

Python models support dbt materializations (`table` and `incremental`). To learn more about them, visit the [Materializations page](/docs/build/materializations).

## Python-specific functionality

### Defining functions

In addition to defining a `model` function, the Python model can import other functions or define its own. Here's an example on Snowpark, defining a custom `add_one` function:

```python
def add_one(x):
    return x + 1

def model(dbt, session):
    dbt.config(materialized="table")
    temps_df = dbt.ref("temperatures")

    # warm things up just a little
    df = temps_df.withColumn("degree_plus_one", add_one(temps_df["degree"]))
    return df
```

Currently, Python functions defined in one dbt model can't be imported and reused in other models. See the ["Code reuse"](#code-reuse) section for the potential patterns we're considering.

### Using PyPI packages

You can also define functions that depend on third-party packages so long as those packages are installed and available to the Python runtime on your data platform. See notes on "Installing Packages" for [specific data platforms](#supported-data-platforms).

In this example, we use the `holidays` package to determine if a given date is a holiday in France. The code below uses the pandas API for simplicity and consistency across platforms. The exact syntax, and the need to refactor for multi-node processing, still vary.
+ + + +```python +import holidays + +def is_holiday(date_col): + # Chez Jaffle + french_holidays = holidays.France() + is_holiday = (date_col in french_holidays) + return is_holiday + +def model(dbt, session): + dbt.config( + materialized = "table", + packages = ["holidays"] + ) + + orders_df = dbt.ref("stg_orders") + + df = orders_df.to_pandas() + + # apply our function + # (columns need to be in uppercase on Snowpark) + df["IS_HOLIDAY"] = df["ORDER_DATE"].apply(is_holiday) + + # return final dataset (Pandas DataFrame) + return df +``` + + + +
+ +
+ + + +```python +import holidays + +def is_holiday(date_col): + # Chez Jaffle + french_holidays = holidays.France() + is_holiday = (date_col in french_holidays) + return is_holiday + +def model(dbt, session): + dbt.config( + materialized = "table", + packages = ["holidays"] + ) + + orders_df = dbt.ref("stg_orders") + + df = orders_df.to_pandas_on_spark() # Spark 3.2+ + # df = orders_df.toPandas() in earlier versions + + # apply our function + df["is_holiday"] = df["order_date"].apply(is_holiday) + + # convert back to PySpark + df = df.to_spark() # Spark 3.2+ + # df = session.createDataFrame(df) in earlier versions + + # return final dataset (PySpark DataFrame) + return df +``` + + + +
+ +

#### Configuring packages

We encourage you to configure required packages and versions so dbt can track them in project metadata. This configuration is required for the implementation on some platforms. If you need specific versions of packages, specify them.

```python
def model(dbt, session):
    dbt.config(
        packages = ["numpy==1.23.1", "scikit-learn"]
    )
```

```yml
version: 2

models:
  - name: my_python_model
    config:
      packages:
        - "numpy==1.23.1"
        - scikit-learn
```

#### User-defined functions (UDFs)

You can use the `@udf` decorator or `udf` function to define an "anonymous" function and call it within your `model` function's DataFrame transformation. This is a typical pattern for applying more complex functions as DataFrame operations, especially if those functions require inputs from third-party packages.
- [Snowpark Python: Creating UDFs](https://docs.snowflake.com/en/developer-guide/snowpark/python/creating-udfs.html)
- ["PySpark functions: udf"](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.udf.html)
+ + + +```python +import snowflake.snowpark.types as T +import snowflake.snowpark.functions as F +import numpy + +def register_udf_add_random(): + add_random = F.udf( + # use 'lambda' syntax, for simple functional behavior + lambda x: x + numpy.random.normal(), + return_type=T.FloatType(), + input_types=[T.FloatType()] + ) + return add_random + +def model(dbt, session): + + dbt.config( + materialized = "table", + packages = ["numpy"] + ) + + temps_df = dbt.ref("temperatures") + + add_random = register_udf_add_random() + + # warm things up, who knows by how much + df = temps_df.withColumn("degree_plus_random", add_random("degree")) + return df +``` + + + +**Note:** Due to a Snowpark limitation, it is not currently possible to register complex named UDFs within stored procedures and, therefore, dbt Python models. We are looking to add native support for Python UDFs as a project/DAG resource type in a future release. For the time being, if you want to create a "vectorized" Python UDF via the Batch API, we recommend either: +- Writing [`create function`](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-batch.html) inside a SQL macro, to run as a hook or run-operation +- [Registering from a staged file](https://docs.snowflake.com/ko/developer-guide/snowpark/reference/python/_autosummary/snowflake.snowpark.udf.html#snowflake.snowpark.udf.UDFRegistration.register_from_file) within your Python model code + +
+ +
```python
import pyspark.sql.types as T
import pyspark.sql.functions as F
import numpy

# use a 'decorator' for more readable code
@F.udf(returnType=T.DoubleType())
def add_random(x):
    random_number = numpy.random.normal()
    return x + random_number

def model(dbt, session):
    dbt.config(
        materialized = "table",
        packages = ["numpy"]
    )

    temps_df = dbt.ref("temperatures")

    # warm things up, who knows by how much
    df = temps_df.withColumn("degree_plus_random", add_random("degree"))
    return df
```

+ +

#### Code reuse

Currently, Python functions defined in one dbt model cannot be imported and reused in other models. This is something we would like dbt to support, so there are two patterns we're considering:
1. Creating and registering **"named" UDFs** — This process is different across data platforms and has some performance limitations. (Snowpark does support ["vectorized" UDFs](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-batch.html): Pandas-like functions that can be executed in parallel.)
2. **Private Python packages** — In addition to importing reusable functions from public PyPI packages, many data platforms support uploading custom Python assets and registering them as packages. The upload process looks different across platforms, but your code’s actual `import` looks the same.

:::note ❓ dbt questions

- Should dbt have a role in abstracting over UDFs? Should dbt support a new type of DAG node, `function`? Would the primary use case be code reuse across Python models or defining Python-language functions that can be called from SQL models?
- How can dbt help users when uploading or initializing private Python assets? Is this a new form of `dbt deps`?
- How can dbt support users who want to test custom functions? If defined as UDFs: "unit testing" in the database? If "pure" functions in packages: encourage adoption of `pytest`?

💬 Discussion: ["Python models: package, artifact/object storage, and UDF management in dbt"](https://github.com/dbt-labs/dbt-core/discussions/5741)
:::

### DataFrame API and syntax

Over the past decade, most people writing data transformations in Python have adopted DataFrame as their common abstraction. dbt follows this convention by returning `ref()` and `source()` as DataFrames, and it expects all Python models to return a DataFrame.

A DataFrame is a two-dimensional data structure (rows and columns). It supports convenient methods for transforming that data and creating new columns from calculations performed on existing columns. It also offers convenient ways for previewing data while developing locally or in a notebook.

That's about where the agreement ends. There are numerous frameworks with their own syntaxes and APIs for DataFrames. The [pandas](https://pandas.pydata.org/docs/) library offered one of the original DataFrame APIs, and its syntax is the most common to learn for new data professionals. Most newer DataFrame APIs are compatible with pandas-style syntax, though few can offer perfect interoperability. This is true for Snowpark and PySpark, which have their own DataFrame APIs.

When developing a Python model, you will find yourself asking these questions:

**Why pandas?** — It's the most common API for DataFrames. It makes it easy to explore sampled data and develop transformations locally. You can “promote” your code as-is into dbt models and run it in production for small datasets.

**Why _not_ pandas?** — Performance. pandas runs "single-node" transformations, which cannot benefit from the parallelism and distributed computing offered by modern data warehouses. This quickly becomes a problem as you operate on larger datasets. Some data platforms support optimizations for code written using pandas' DataFrame API, preventing the need for major refactors. For example, ["pandas on PySpark"](https://spark.apache.org/docs/latest/api/python/getting_started/quickstart_ps.html) offers support for 95% of pandas functionality, using the same API while still leveraging parallel processing.
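To make the syntax gap concrete, here is the same one-line transformation sketched in both styles inside a dbt Python model (the `stg_orders` model and column names are illustrative, and Snowpark uppercases column names):

```python
def model(dbt, session):
    orders_df = dbt.ref("stg_orders")

    # pandas style: eager, single-node; convenient for small data and local iteration
    pdf = orders_df.to_pandas()
    pdf["AMOUNT_USD"] = pdf["AMOUNT_CENTS"] / 100

    # equivalent native style, lazily evaluated and run in parallel on the platform
    # (Snowpark: with_column; PySpark: withColumn)
    # df = orders_df.with_column("amount_usd", orders_df["amount_cents"] / 100)

    return pdf
```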
+ +:::note ❓ dbt questions +- When developing a new dbt Python model, should we recommend pandas-style syntax for rapid iteration and then refactor? +- Which open source libraries provide compelling abstractions across different data engines and vendor-specific APIs? +- Should dbt attempt to play a longer-term role in standardizing across them? + +💬 Discussion: ["Python models: the pandas problem (and a possible solution)"](https://github.com/dbt-labs/dbt-core/discussions/5738) +::: + +## Limitations + +Python models have capabilities that SQL models do not. They also have some drawbacks compared to SQL models: + +- **Time and cost.** Python models are slower to run than SQL models, and the cloud resources that run them can be more expensive. Running Python requires more general-purpose compute. That compute might sometimes live on a separate service or architecture from your SQL models. **However:** We believe that deploying Python models via dbt—with unified lineage, testing, and documentation—is, from a human standpoint, **dramatically** faster and cheaper. By comparison, spinning up separate infrastructure to orchestrate Python transformations in production and different tooling to integrate with dbt is much more time-consuming and expensive. +- **Syntax differences** are even more pronounced. Over the years, dbt has done a lot, via dispatch patterns and packages such as `dbt_utils`, to abstract over differences in SQL dialects across popular data warehouses. Python offers a **much** wider field of play. If there are five ways to do something in SQL, there are 500 ways to write it in Python, all with varying performance and adherence to standards. Those options can be overwhelming. As the maintainers of dbt, we will be learning from state-of-the-art projects tackling this problem and sharing guidance as we develop it. +- **These capabilities are very new.** As data warehouses develop new features, we expect them to offer cheaper, faster, and more intuitive mechanisms for deploying Python transformations. **We reserve the right to change the underlying implementation for executing Python models in future releases.** Our commitment to you is around the code in your model `.py` files, following the documented capabilities and guidance we're providing here. + +As a general rule, if there's a transformation you could write equally well in SQL or Python, we believe that well-written SQL is preferable: it's more accessible to a greater number of colleagues, and it's easier to write code that's performant at scale. If there's a transformation you _can't_ write in SQL, or where ten lines of elegant and well-annotated Python could save you 1000 lines of hard-to-read Jinja-SQL, Python is the way to go. + +## Supported data platforms + +In their initial launch, Python models are supported on three of the most popular data platforms: Snowflake, Databricks, and BigQuery/GCP (via Dataproc). Both Databricks and GCP's Dataproc use PySpark as the processing framework. Snowflake uses its own framework, Snowpark, which has many similarities to PySpark. + + + +
+
+**Additional setup:** Snowpark Python is in Public Preview: open and enabled by default for all accounts. You will need to [acknowledge and accept Snowflake Third Party Terms](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-packages.html#getting-started) to use Anaconda packages.
+
+**Installing packages:** Snowpark supports several popular packages via Anaconda. The complete list is at https://repo.anaconda.com/pkgs/snowflake/. Packages are installed at the time your model is being run. Different models can have different package dependencies. If you are using third-party packages, Snowflake recommends using a dedicated virtual warehouse for best performance rather than one with many concurrent users.
+
+**About "sprocs":** dbt submits Python models to run as "stored procedures," which some people call "sprocs" for short. By default, dbt will create a named sproc containing your model's compiled Python code, and then "call" it to execute. Snowpark has a Private Preview feature for "temporary" or "anonymous" stored procedures ([docs](https://docs.snowflake.com/en/LIMITEDACCESS/call-with.html)), which are faster and leave a cleaner query history. If this feature is enabled for your account, you can switch it on for your models by configuring `use_anonymous_sproc: True`. We plan to switch this on for all dbt + Snowpark Python models in a future release.
+
+```yml
+# I asked Snowflake Support to enable this Private Preview feature,
+# and now my dbt-py models run even faster!
+models:
+  use_anonymous_sproc: True
+```
+
+**Docs:** ["Developer Guide: Snowpark Python"](https://docs.snowflake.com/en/developer-guide/snowpark/python/index.html)
+
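+
+To make the per-model package behavior concrete, here's a minimal sketch (the model and column names are hypothetical; `numpy` is available on the Anaconda channel linked above):
+
+```python
+import numpy as np
+
+def model(dbt, session):
+    dbt.config(
+        materialized="table",
+        packages=["numpy"]  # resolved from Snowflake's Anaconda channel at run time
+    )
+
+    # column names come back uppercased on Snowpark
+    df = dbt.ref("temperatures").to_pandas()
+    df["DEGREE_NOISY"] = df["DEGREE"] + np.random.normal(size=len(df))
+    return df
+```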
+ +
+
+**Submission methods:** Databricks supports a few different mechanisms to submit PySpark code, each with relative advantages. Some are better for supporting iterative development, while others are better for supporting lower-cost production deployments. The options are:
+- `all_purpose_cluster` (default): dbt will run your Python model using the cluster ID configured as `cluster` in your connection profile or for this specific model. These clusters are more expensive but also much more responsive. We recommend using an interactive all-purpose cluster for quicker iteration in development.
+  - `create_notebook: True`: dbt will upload your model's compiled PySpark code to a notebook in the namespace `/Shared/dbt_python_model/{schema}`, where `{schema}` is the configured schema for the model, and execute that notebook to run using the all-purpose cluster. The appeal of this approach is that you can easily open the notebook in the Databricks UI for debugging or fine-tuning right after running your model. Remember to copy any changes into your dbt `.py` model code before re-running.
+  - `create_notebook: False` (default): dbt will use the [Command API](https://docs.databricks.com/dev-tools/api/1.2/index.html#run-a-command), which is slightly faster.
+- `job_cluster`: dbt will upload your model's compiled PySpark code to a notebook in the namespace `/Shared/dbt_python_model/{schema}`, where `{schema}` is the configured schema for the model, and execute that notebook to run using a short-lived jobs cluster. For each Python model, Databricks will need to spin up the cluster, execute the model's PySpark transformation, and then spin down the cluster. As such, job clusters take longer before and after model execution, but they're also less expensive, so we recommend these for longer-running Python models in production. To use the `job_cluster` submission method, your model must be configured with `job_cluster_config`, which defines key-value properties for `new_cluster`, as defined in the [JobRunsSubmit API](https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit).
+
+You can configure each model's `submission_method` in all the standard ways you supply configuration:
+
+```python
+def model(dbt, session):
+    dbt.config(
+        submission_method="all_purpose_cluster",
+        create_notebook=True,
+        cluster_id="abcd-1234-wxyz"
+    )
+    ...
+```
+```yml
+version: 2
+models:
+  - name: my_python_model
+    config:
+      submission_method: job_cluster
+      job_cluster_config:
+        spark_version: ...
+        node_type_id: ...
+```
+```yml
+# dbt_project.yml
+models:
+  project_name:
+    subfolder:
+      # set defaults for all .py models defined in this subfolder
+      +submission_method: all_purpose_cluster
+      +create_notebook: False
+      +cluster_id: abcd-1234-wxyz
+```
+
+If not configured, `dbt-spark` will use the built-in defaults: the all-purpose cluster (based on `cluster` in your connection profile) without creating a notebook. The `dbt-databricks` adapter will default to the cluster configured in `http_path`. We encourage explicitly configuring the clusters for Python models in Databricks projects.
+
+**Installing packages:** When using all-purpose clusters, we recommend installing the packages your Python models use directly on the cluster, ahead of time.
+ +**Docs:** +- [PySpark DataFrame syntax](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.html) +- [Databricks: Introduction to DataFrames - Python](https://docs.databricks.com/spark/latest/dataframes-datasets/introduction-to-dataframes-python.html) + +
+ +
+
+The `dbt-bigquery` adapter uses a service called Dataproc to submit your Python models as PySpark jobs. That Python/PySpark code will read from your tables and views in BigQuery, perform all computation in Dataproc, and write the final result back to BigQuery.
+
+**Submission methods.** Dataproc supports two submission methods: `serverless` and `cluster`. Dataproc Serverless does not require a ready cluster, which saves on hassle and cost—but it is slower to start up, and much more limited in terms of available configuration. For example, Dataproc Serverless supports only a small set of Python packages, though it does include `pandas`, `numpy`, and `scikit-learn`. (See the full list [here](https://cloud.google.com/dataproc-serverless/docs/guides/custom-containers#example_custom_container_image_build), under "The following packages are installed in the default image"). By contrast, if you create a Dataproc Cluster in advance, you can fine-tune the cluster's configuration, install any PyPI packages you want, and benefit from faster, more responsive runtimes.
+
+Use the `cluster` submission method with dedicated Dataproc clusters you or your organization manage. Use the `serverless` submission method to avoid managing a Spark cluster. The latter may be quicker for getting started, but both are valid for production.
+
+**Additional setup:**
+- Create or use an existing [Cloud Storage bucket](https://cloud.google.com/storage/docs/creating-buckets)
+- Enable Dataproc APIs for your project + region
+- If using the `cluster` submission method: Create or use an existing [Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) with the [Spark BigQuery connector initialization action](https://github.com/GoogleCloudDataproc/initialization-actions/tree/master/connectors#bigquery-connectors). (Google recommends copying the action into your own Cloud Storage bucket, rather than using the example version shown in the screenshot.)
+
+The following configurations are needed to run Python models on Dataproc. You can add these to your [BigQuery profile](bigquery-profile) or configure them on specific Python models:
+- `gcs_bucket`: Storage bucket to which dbt will upload your model's compiled PySpark code
+- `dataproc_region`: GCP region in which you have enabled Dataproc (for example `us-central1`)
+- `dataproc_cluster_name`: Name of the Dataproc cluster to use for running the Python model (executing the PySpark job). Only required if `submission_method: cluster`
+
+```python
+def model(dbt, session):
+    dbt.config(
+        submission_method="cluster",
+        dataproc_cluster_name="my-favorite-cluster"
+    )
+    ...
+```
+```yml
+version: 2
+models:
+  - name: my_python_model
+    config:
+      submission_method: serverless
+```
+
+Any user or service account that runs dbt Python models will need the following permissions (in addition to the required BigQuery permissions) ([docs](https://cloud.google.com/dataproc/docs/concepts/iam/iam)):
+```
+dataproc.clusters.use
+dataproc.jobs.create
+dataproc.jobs.get
+dataproc.operations.get
+storage.buckets.get
+storage.objects.create
+storage.objects.delete
+```
+
+**Installing packages:** If you are using a Dataproc Cluster (as opposed to Dataproc Serverless), you can add third-party packages while creating the cluster.
+ +Google recommends installing Python packages on Dataproc clusters via initialization actions: +- [How initialization actions are used](https://github.com/GoogleCloudDataproc/initialization-actions/blob/master/README.md#how-initialization-actions-are-used) +- [Actions for installing via `pip` or `conda`](https://github.com/GoogleCloudDataproc/initialization-actions/tree/master/python) + +You can also install packages at cluster creation time by [defining cluster properties](https://cloud.google.com/dataproc/docs/tutorials/python-configuration#image_version_20): `dataproc:pip.packages` or `dataproc:conda.packages`. + + + +**Docs:** +- [Dataproc overview](https://cloud.google.com/dataproc/docs/concepts/overview) +- [PySpark DataFrame syntax](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.html) + +
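+
+As a quick end-to-end illustration, here's a minimal sketch of a Python model that runs on Dataproc Serverless using only packages from the default image, such as `pandas` and `scikit-learn` (the model and column names are hypothetical):
+
+```python
+from sklearn.linear_model import LinearRegression
+
+def model(dbt, session):
+    dbt.config(submission_method="serverless")
+
+    # PySpark DataFrame -> pandas DataFrame
+    sales = dbt.ref("daily_sales").toPandas()
+
+    # fit a simple trend line over the daily series
+    X = sales[["day_number"]]
+    sales["trend"] = LinearRegression().fit(X, sales["revenue"]).predict(X)
+
+    # return a Spark DataFrame so dbt can write the result back to BigQuery
+    return session.createDataFrame(sales)
+```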
+ +
+ +
diff --git a/website/docs/docs/building-a-dbt-project/seeds.md b/website/docs/docs/build/seeds.md
similarity index 80%
rename from website/docs/docs/building-a-dbt-project/seeds.md
rename to website/docs/docs/build/seeds.md
index 96d891c9571..bd26c1fceea 100644
--- a/website/docs/docs/building-a-dbt-project/seeds.md
+++ b/website/docs/docs/build/seeds.md
@@ -7,10 +7,10 @@ id: "seeds"
 * [Seed properties](seed-properties)
 * [`seed` command](seed)
 
-## Getting started
-Seeds are CSV files in your dbt project (typically in your `seeds` directory), that dbt can load into your data warehouse using the `dbt seed` command.
+## Overview
+Seeds are CSV files in your dbt project (typically in your `seeds` directory), that dbt can load into your data warehouse using the `dbt seed` command.
 
-Seeds can be referenced in downstream models the same way as referencing models — by using the `ref` [function](dbt-jinja-functions/ref).
+Seeds can be referenced in downstream models the same way as referencing models — by using the [`ref` function](/reference/dbt-jinja-functions/ref).
 
 Because these CSV files are located in your dbt repository, they are version controlled and code reviewable. Seeds are best suited to static data which changes infrequently.
 
@@ -21,7 +21,7 @@ Good use-cases for seeds:
 Poor use-cases of dbt seeds:
 * Loading raw data that has been exported to CSVs
-* Any kind of production data containing sensitive information. For example
+* Any kind of production data containing sensitive information. For example,
 personal identifiable information (PII) and passwords.
 
@@ -78,12 +78,12 @@ Seeds are configured in your `dbt_project.yml`, check out the [seed configuratio
 You can document and test seeds in yaml by declaring properties — check out the docs on [seed properties](seed-properties) for more information.
 
 ## FAQs
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/website/docs/docs/building-a-dbt-project/snapshots.md b/website/docs/docs/build/snapshots.md
similarity index 94%
rename from website/docs/docs/building-a-dbt-project/snapshots.md
rename to website/docs/docs/build/snapshots.md
index c11f72e2e38..a204c0416c5 100644
--- a/website/docs/docs/building-a-dbt-project/snapshots.md
+++ b/website/docs/docs/build/snapshots.md
@@ -8,10 +8,10 @@ id: "snapshots"
 * [Snapshot properties](snapshot-properties)
 * [`snapshot` command](snapshot)
 
-## Getting started
+## Overview
 
 ### What are snapshots?
-Commonly, analysts need to "look back in time" at some previous state of data in their mutable tables. While some source data systems are built in a way that makes accessing historical data possible, this is often not the case. dbt provides a mechanism, **snapshots**, which records changes to a mutable table over time.
+Analysts often need to "look back in time" at previous data states in their mutable tables. While some source data systems are built in a way that makes accessing historical data possible, this is not always the case. dbt provides a mechanism, **snapshots**, which records changes to a mutable table over time.
 
 Snapshots implement [type-2 Slowly Changing Dimensions](https://en.wikipedia.org/wiki/Slowly_changing_dimension#Type_2:_add_new_row) over mutable source tables. These Slowly Changing Dimensions (or SCDs) identify how a row in a table changes over time. Imagine you have an `orders` table where the `status` field can be overwritten as the order is processed.
@@ -72,6 +72,7 @@ When you run the [`dbt snapshot` command](snapshot):
 Snapshots can be referenced in downstream models the same way as referencing models — by using the [ref](ref) function.
 
 ## Example
+
 To add a snapshot to your project:
 
 1. Create a file in your `snapshots` directory with a `.sql` file extension, e.g. `snapshots/orders.sql`
@@ -312,7 +313,7 @@ Snapshots cannot be rebuilt. As such, it's a good idea to put snapshots in a sep
 Your models should then select from these snapshots, treating them like regular data sources. As much as possible, snapshot your source data in its raw form and use downstream models to clean up the data
 
 #### Use the `source` function in your query.
-This helps when understanding data lineage in your project.
+This helps when understanding data lineage in your project.
 
 #### Include as many columns as possible.
 In fact, go for `select *` if performance permits! Even if a column doesn't feel useful at the moment, it might be better to snapshot it in case it becomes useful – after all, you won't be able to recreate the column later.
@@ -325,7 +326,7 @@ If you apply business logic in a snapshot query, and this logic changes in the f
 Basically – keep your query as simple as possible! Some reasonable exceptions to these recommendations include:
 * Selecting specific columns if the table is wide.
-* Doing light transformation to get data into a reasonable shape, for example, unpacking a JSON blob to flatten your source data into columns.
+* Doing light transformation to get data into a reasonable shape, for example, unpacking a JSON blob to flatten your source data into columns.
 
 ## Snapshot meta-fields
 
@@ -343,8 +344,9 @@ Snapshot tables will be created as a clone of your source
 For the `timestamp` strategy, the configured `updated_at` column is used to populate the `dbt_valid_from`, `dbt_valid_to` and `dbt_updated_at` columns.
+ Details for the timestamp strategy -Snapshot query results at `2019-01-01 11:00`: +Snapshot query results at `2019-01-01 11:00` | id | status | updated_at | | -- | ------- | ---------------- | @@ -376,8 +378,9 @@ Snapshot results (note that `11:30` is not used anywhere): For the `check` strategy, the current timestamp is used to populate each column
+ Details for the check strategy -Snapshot query results at `2019-01-01 11:00`: +Snapshot query results at `2019-01-01 11:00` | id | status | | -- | ------- | @@ -406,9 +409,9 @@ Snapshot results: ## FAQs - - - - - - + + + + + + \ No newline at end of file diff --git a/website/docs/docs/building-a-dbt-project/using-sources.md b/website/docs/docs/build/sources.md similarity index 89% rename from website/docs/docs/building-a-dbt-project/using-sources.md rename to website/docs/docs/build/sources.md index d75d79d1baf..e5802ada6db 100644 --- a/website/docs/docs/building-a-dbt-project/using-sources.md +++ b/website/docs/docs/build/sources.md @@ -1,12 +1,13 @@ --- title: "Sources" -id: "using-sources" +id: "sources" +search_weight: "heavy" --- ## Related reference docs * [Source properties](source-properties) * [Source configurations](source-configs) -* [`{{ source() }}` jinja function](dbt-jinja-functions/source) +* [`{{ source() }}` jinja function](/reference/dbt-jinja-functions/source) * [`source freshness` command](commands/source) ## Using sources @@ -41,7 +42,7 @@ If you're not already familiar with these files, be sure to check out [the docum ### Selecting from a source -Once a source has been defined, it can be referenced from a model using the [`{{ source()}}` function](dbt-jinja-functions/source). +Once a source has been defined, it can be referenced from a model using the [`{{ source()}}` function](/reference/dbt-jinja-functions/source). @@ -84,7 +85,7 @@ You can also: - Add tests to sources - Add descriptions to sources, that get rendered as part of your documentation site -These should be familiar concepts if you've already added tests and descriptions to your models (if not check out the guides on [testing](building-a-dbt-project/tests) and [documentation](documentation)). +These should be familiar concepts if you've already added tests and descriptions to your models (if not check out the guides on [testing](/docs/build/tests) and [documentation](documentation)). @@ -117,11 +118,11 @@ sources: You can find more details on the available properties for sources in the [reference section](source-properties). ### FAQs - - - - - + + + + + ## Snapshotting source data freshness With a couple of extra configs, dbt can optionally snapshot the "freshness" of the data in your source tables. This is useful for understanding if your data pipelines are in a healthy state, and is a critical component of defining SLAs for your warehouse. @@ -186,6 +187,6 @@ The results of this query are used to determine whether the source is fresh or n ### FAQs - - - + + + diff --git a/website/docs/docs/building-a-dbt-project/building-models.md b/website/docs/docs/build/sql-models.md similarity index 57% rename from website/docs/docs/building-a-dbt-project/building-models.md rename to website/docs/docs/build/sql-models.md index 697e6894482..d4c8b3de23c 100644 --- a/website/docs/docs/building-a-dbt-project/building-models.md +++ b/website/docs/docs/build/sql-models.md @@ -1,6 +1,6 @@ --- -title: "dbt Models" -id: "building-models" +title: "SQL models" +id: "sql-models" --- ## Related reference docs @@ -13,16 +13,24 @@ id: "building-models" :::info Building your first models -If you're new to dbt, we recommend that you check out our [Getting Started Tutorial](tutorial/getting-started.md) to build your first dbt project with models. +If you're new to dbt, we recommend that you read the [Getting Started guide](/docs/get-started/getting-started/overview) to build your first dbt project with models. 
 :::
 
-A model is a `select` statement. Models are defined in `.sql` files (typically in your `models` directory):
+
+
+Starting in v1.3, dbt Core adds support for **Python models**.
+
+dbt's Python capabilities are an extension of its capabilities with SQL models. If you're new to dbt, we recommend that you read this page first, before reading ["Python Models"](python-models).
+
+
+
+A SQL model is a `select` statement. Models are defined in `.sql` files (typically in your `models` directory):
 - Each `.sql` file contains one model / `select` statement
-- The name of the file is used as the model name
+- The model name is inherited from the filename.
 - Models can be nested in subdirectories within the `models` directory
 
-When you execute the [`dbt run` command](run), dbt will build this model in your data warehouse by wrapping it in a `create view as` or `create table as` statement.
+When you execute the [`dbt run` command](run), dbt will build this model by wrapping it in a `create view as` or `create table as` statement.
 
 For example, consider this `customers` model:
 
@@ -56,7 +64,7 @@ left join customer_orders using (customer_id)
 
 
 
-When you execute `dbt run`, dbt will build this as a view named `customers` in your target schema:
+When you execute `dbt run`, dbt will build this as a _view_ named `customers` in your target schema:
 
 ```sql
 create view dbt_alice.customers as (
@@ -87,23 +95,24 @@ create view dbt_alice.customers as (
 ```
 
 Why a _view_ named `dbt_alice.customers`? By default dbt will:
-* create models as views
-* build models in a target schema you define
-* use your file name as the view or table name in the database
+* Create models as views
+* Build models in a target schema you define
+* Use your file name as the view or table name in the database
 
-You can use _configurations_ to change any of these behaviors — more on that below.
+You can use _configurations_ to change any of these behaviors — more on that later.
 
 ### FAQs
-
-
-
-
-
+
+
+
+
+
 
 ## Configuring models
 Configurations are "model settings" that can be set in your `dbt_project.yml` file, _and_ in your model file using a `config` block. Some example configurations include:
-* Change the [materialization](materializations) that a model uses — a materialization determines the SQL that dbt uses to create the model in your warehouse.
-* Build models into separate [schemas](using-custom-schemas).
+
+* Changing the materialization that a model uses — a [materialization](materializations) determines the SQL that dbt uses to create the model in your warehouse.
+* Build models into separate [schemas](/docs/build/custom-schemas).
 * Apply [tags](resource-configs/tags) to a model.
 
 Here's an example of model configuration:
@@ -143,17 +152,17 @@ with customer_orders as ...
 
 
 
-Importantly, configurations are applied hierarchically — a configuration applied to a subdirectory will override any general configurations.
+It is important to note that configurations are applied hierarchically — a configuration applied to a subdirectory will override any general configurations.
 
 You can learn more about configurations in the [reference docs](model-configs).
 
 ### FAQs
-
-
+
+
 
 ## Building dependencies between models
 
-By using the [`ref` function](ref) in the place of table names in a query, you can build dependencies between models. Use the name of another model as the argument for `ref`.
+You can build dependencies between models by using the [`ref` function](ref) in place of table names in a query. Use the name of another model as the argument for `ref`.
-* Manage separate environments — dbt will replace the model specified in the `ref` function with the database name for the table (or view). Importantly, this is environment-aware — if you're running dbt with a target schema named `dbt_alice`, it will select from an upstream table in the same schema. Check out the tabs above to see this in action.
+* Manage separate environments — dbt will replace the model specified in the `ref` function with the database name for the table (or view). Importantly, this is environment-aware — if you're running dbt with a target schema named `dbt_alice`, it will select from an upstream table in the same schema. Check out the tabs above to see this in action.
 
 Additionally, the `ref` function encourages you to write modular transformations, so that you can re-use models, and reduce repeated code.
 
 ## Testing and documenting models
 
-You can also document and test models — skip ahead to the section on [testing](building-a-dbt-project/tests) and [documentation](documentation) for more information.
+You can also document and test models — skip ahead to the section on [testing](/docs/build/tests) and [documentation](/docs/collaborate/documentation) for more information.
 
 ## Additional FAQs
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
diff --git a/website/docs/docs/building-a-dbt-project/tests.md b/website/docs/docs/build/tests.md
similarity index 88%
rename from website/docs/docs/building-a-dbt-project/tests.md
rename to website/docs/docs/build/tests.md
index 652525ca3fc..d9bdf3435bd 100644
--- a/website/docs/docs/building-a-dbt-project/tests.md
+++ b/website/docs/docs/build/tests.md
@@ -1,5 +1,6 @@
 ---
 title: "Tests"
+id: "tests"
 ---
 
 ## Related reference docs
@@ -8,10 +9,12 @@ title: "Tests"
 * [Test configurations](test-configs)
 * [Test selection examples](test-selection-examples)
 
-## Getting started
+## Overview
 
 Tests are assertions you make about your models and other resources in your dbt project (e.g. sources, seeds and snapshots). When you run `dbt test`, dbt will tell you if each test in your project passes or fails.
 
+You can use tests to improve the integrity of the SQL in each model by making assertions about the results generated. Out of the box, you can test whether a specified column in a model only contains non-null values, unique values, or values that have a corresponding value in another model (for example, a `customer_id` for an `order` corresponds to an `id` in the `customers` model), and values from a specified list. You can extend tests to suit business logic specific to your organization – any assertion that you can make about your model in the form of a select query can be turned into a test.
+
 
 * `v0.20.0`: Both types of tests return a set of failing records. Previously, generic/schema tests returned a numeric value representing failures. Generic tests (f.k.a. schema tests) are defined using `test` blocks instead of macros prefixed `test_`.
 
 
 There are two ways of defining tests in dbt:
 
 Defining tests is a great way to confirm that your code is working correctly, and helps prevent regressions when your code changes. Because you can use them over and over again, making similar assertions with minor variations, generic tests tend to be much more common—they should make up the bulk of your dbt testing suite. That said, both ways of defining tests have their time and place.
:::tip Creating your first tests -If you're new to dbt, we recommend that you check out our [Getting Started Tutorial](tutorial/getting-started.md) to build your first dbt project with models and tests. +If you're new to dbt, we recommend that you check out our [Getting Started guide](/docs/get-started/getting-started/overview) to build your first dbt project with models and tests. ::: ## Singular tests @@ -112,7 +115,7 @@ You can find more information about these tests, and additional configurations ( Those four tests are enough to get you started. You'll quickly find you want to use a wider variety of tests—a good thing! You can also install generic tests from a package, or write your own, to use (and reuse) across your dbt project. Check out the [guide on custom generic tests](custom-generic-tests) for more information. :::info -There are generic tests defined in some open source packages, such as [dbt-utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/) and [dbt-expectations](https://hub.getdbt.com/calogica/dbt_expectations/latest/) — skip ahead to the docs on [packages](package-management) to learn more! +There are generic tests defined in some open source packages, such as [dbt-utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/) and [dbt-expectations](https://hub.getdbt.com/calogica/dbt_expectations/latest/) — skip ahead to the docs on [packages](/docs/build/packages) to learn more! ::: ### Example @@ -236,7 +239,6 @@ where {{ column_name }} is null - ## Storing test failures @@ -252,22 +254,16 @@ This workflow allows you to query and examine failing records much more quickly Note that, if you elect to store test failures: -- Test result tables are created in a schema suffixed or named `dbt_test__audit`, by default. It is possible to change this value by setting a `schema` config. (For more details on schema naming, see [using custom schemas](using-custom-schemas).) +* Test result tables are created in a schema suffixed or named `dbt_test__audit`, by default. It is possible to change this value by setting a `schema` config. (For more details on schema naming, see [using custom schemas](/docs/build/custom-schemas).) - A test's results will always **replace** previous failures for the same test. ## FAQs - - - - - - - - - - - + + + + + + + + diff --git a/website/docs/docs/building-a-dbt-project/archival.md b/website/docs/docs/building-a-dbt-project/archival.md deleted file mode 100644 index 86fe28bb7e3..00000000000 --- a/website/docs/docs/building-a-dbt-project/archival.md +++ /dev/null @@ -1,15 +0,0 @@ ---- -title: "Archives" -id: "archival" ---- - - -:::info Archives are now Snapshots! - -Archives have been renamed to "Snapshots" in dbt v0.14.0. Check out the docs on [Snapshots](snapshots) for more information. - -::: - - - - diff --git a/website/docs/docs/building-a-dbt-project/building-models/python-models.md b/website/docs/docs/building-a-dbt-project/building-models/python-models.md new file mode 100644 index 00000000000..4c25da2a10d --- /dev/null +++ b/website/docs/docs/building-a-dbt-project/building-models/python-models.md @@ -0,0 +1,713 @@ +--- +title: "Python models" +--- + +:::info Brand new! + +dbt Core v1.3 included first-ever support for Python models. Note that only [specific data platforms](#specific-data-platforms) support dbt-py models. + +We encourage you to: +- Read [the original discussion](https://github.com/dbt-labs/dbt-core/discussions/5261) that proposed this feature. 
+- Contribute to [best practices for developing Python models in dbt](https://discourse.getdbt.com/t/dbt-python-model-dbt-py-best-practices/5204 ). +- Weigh in on [next steps for Python models, beyond v1.3](https://github.com/dbt-labs/dbt-core/discussions/5742). +- Join the **#dbt-core-python-models** channel in the [dbt Community Slack](https://www.getdbt.com/community/join-the-community/). + +Below, you'll see sections entitled "❓ **Our questions**." We are excited to have released a first narrow set of functionality in v1.3, which will solve real use cases. We also know this is a first step into a much wider field of possibility. We don't pretend to have all the answers. We're excited to keep developing our opinionated recommendations and next steps for product development—and we want your help. Comment in the GitHub discussions; leave thoughts in Slack; bring up dbt + Python in casual conversation with colleagues and friends. +::: + +## About Python models in dbt + +dbt Python ("dbt-py") models will help you solve use cases that can't be solved with SQL. You can perform analyses using tools available in the open source Python ecosystem, including state-of-the-art packages for data science and statistics. Before, you would have needed separate infrastructure and orchestration to run Python transformations in production. By defining your Python transformations in dbt, they're just models in your project, with all the same capabilities around testing, documentation, and lineage. + + + +Python models are supported in dbt Core 1.3 and above. Learn more about [upgrading your version in dbt Cloud](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions) and [upgrading dbt Core versions](https://docs.getdbt.com/docs/core-versions#upgrading-to-new-patch-versions). + +To read more about Python models, change the docs version to 1.3 or higher in the menu above. + + + + + + + + +```python +import ... + +def model(dbt, session): + + my_sql_model_df = dbt.ref("my_sql_model") + + final_df = ... # stuff you can't write in SQL! + + return final_df +``` + + + + + +```yml +version: 2 + +models: + - name: my_python_model + + # Document within the same codebase + description: My transformation written in Python + + # Configure in ways that feel intuitive and familiar + config: + materialized: table + tags: ['python'] + + # Test the results of my Python transformation + columns: + - name: id + # Standard validation for 'grain' of Python results + tests: + - unique + - not_null + tests: + # Write your own validation logic (in SQL) for Python results + - [custom_generic_test](writing-custom-generic-tests) +``` + + + + + + +The prerequisites for dbt Python models include using an adapter for a data platform that supports a fully featured Python runtime. In a dbt Python model, all Python code is executed remotely on the platform. None of it is run by dbt locally. We believe in clearly separating _model definition_ from _model execution_. In this and many other ways, you'll find that dbt's approach to Python models mirrors its longstanding approach to modeling data in SQL. + +We've written this guide assuming that you have some familiarity with dbt. If you've never before written a dbt model, we encourage you to start by first reading [dbt Models](/docs/build/models). Throughout, we'll be drawing connections between Python models and SQL models, as well as making clear their differences. + +### What is a Python model? 
+ +A dbt Python model is a function that reads in dbt sources or other models, applies a series of transformations, and returns a transformed dataset. DataFrame operations define the starting points, the end state, and each step along the way. + +This is similar to the role of CTEs in dbt SQL models. We use CTEs to pull in upstream datasets, define (and name) a series of meaningful transformations, and end with a final `select` statement. You can run the compiled version of a dbt SQL model to see the data included in the resulting view or table. When you `dbt run`, dbt wraps that query in `create view`, `create table`, or more complex DDL to save its results in the database. + +Instead of a final `select` statement, each Python model returns a final DataFrame. Each DataFrame operation is "lazily evaluated." In development, you can preview its data, using methods like `.show()` or `.head()`. When you run a Python model, the full result of the final DataFrame will be saved as a table in your data warehouse. + +dbt Python models have access to almost all of the same configuration options as SQL models. You can test them, document them, add `tags` and `meta` properties to them, grant access to their results to other users, and so on. You can select them by their name, their file path, their configurations, whether they are upstream or downstream of another model, or whether they have been modified compared to a previous project state. + +### Defining a Python model + +Each Python model lives in a `.py` file in your `models/` folder. It defines a function named **`model()`**, which takes two parameters: +- **`dbt`**: A class compiled by dbt Core, unique to each model, enables you to run your Python code in the context of your dbt project and DAG. +- **`session`**: A class representing your data platform’s connection to the Python backend. The session is needed to read in tables as DataFrames, and to write DataFrames back to tables. In PySpark, by convention, the `SparkSession` is named `spark`, and available globally. For consistency across platforms, we always pass it into the `model` function as an explicit argument called `session`. + +The `model()` function must return a single DataFrame. On Snowpark (Snowflake), this can be a Snowpark or pandas DataFrame. Via PySpark (Databricks + BigQuery), this can be a Spark, pandas, or pandas-on-Spark DataFrame. For more about choosing between pandas and native DataFrames, see [DataFrame API + syntax](#dataframe-api--syntax). + +When you `dbt run --select python_model`, dbt will prepare and pass in both arguments (`dbt` and `session`). All you have to do is define the function. This is how every single Python model should look: + + + +```python +def model(dbt, session): + + ... + + return final_df +``` + + + + +### Referencing other models + +Python models participate fully in dbt's directed acyclic graph (DAG) of transformations. Use the `dbt.ref()` method within a Python model to read in data from other models (SQL or Python). If you want to read directly from a raw source table, use `dbt.source()`. These methods return DataFrames pointing to the upstream source, model, seed, or snapshot. + + + +```python +def model(dbt, session): + + # DataFrame representing an upstream model + upstream_model = dbt.ref("upstream_model_name") + + # DataFrame representing an upstream source + upstream_source = dbt.source("upstream_source_name", "table_name") + + ... 
+``` + + + +Of course, you can `ref()` your Python model in downstream SQL models, too: + + + +```sql +with upstream_python_model as ( + + select * from {{ ref('my_python_model') }} + +), + +... +``` + + + +### Configuring Python models + +Just like SQL models, there are three ways to configure Python models: +1. In `dbt_project.yml`, where you can configure many models at once +2. In a dedicated `.yml` file, within the `models/` directory +3. Within the model's `.py` file, using the `dbt.config()` method + +Calling the `dbt.config()` method will set configurations for your model right within your `.py` file, similar to the `{{ config() }}` macro in `.sql` model files: + + + +```python +def model(dbt, session): + + # setting configuration + dbt.config(materialized="table") +``` + + + +There's a limit to how fancy you can get with the `dbt.config()` method. It accepts _only_ literal values (strings, booleans, and numeric types). Passing another function or a more complex data structure is not possible. The reason is that dbt statically analyzes the arguments to `config()` while parsing your model without executing your Python code. If you need to set a more complex configuration, we recommend you define it using the [`config` property](resource-properties/config) in a yaml file. + +#### Accessing project context + +dbt Python models don't use Jinja to render compiled code. Python models have limited access to global project contexts compared to SQL models. That context is made available from the `dbt` class, passed in as an argument to the `model()` function. + +Out of the box, the `dbt` class supports: +- Returning DataFrames referencing the locations of other resources: `dbt.ref()` + `dbt.source()` +- Accessing the database location of the current model: `dbt.this()` (also: `dbt.this.database`, `.schema`, `.identifier`) +- Determining if the current model's run is incremental: `dbt.is_incremental` + +It is possible to extend this context by "getting" them via `dbt.config.get()` after they are configured in the [model's config](/reference/model-configs). This includes inputs such as `var`, `env_var`, and `target`. If you want to use those values to power conditional logic in your model, we require setting them through a dedicated `.yml` file config: + + + +```yml +version: 2 + +models: + - name: my_python_model + config: + materialized: table + target_name: "{{ target.name }}" + specific_var: "{{ var('SPECIFIC_VAR') }}" + specific_env_var: "{{ env_var('SPECIFIC_ENV_VAR') }}" +``` + + + +Then, within the model's Python code, use the `dbt.config.get()` function to _access_ values of configurations that have been set: + + + +```python +def model(dbt, session): + target_name = dbt.config.get("target_name") + specific_var = dbt.config.get("specific_var") + specific_env_var = dbt.config.get("specific_env_var") + + orders_df = dbt.ref("fct_orders") + + # limit data in dev + if target_name == "dev": + orders_df = orders_df.limit(500) +``` + + + +### Materializations + +Python models support two materializations: +- `table` +- `incremental` + +Incremental Python models support all the same [incremental strategies](/docs/build/incremental-models#about-incremental_strategy) as their SQL counterparts. The specific strategies supported depend on your adapter. + +Python models can't be materialized as `view` or `ephemeral`. Python isn't supported for non-model resource types (like tests and snapshots). 
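+
+For instance, here's a minimal sketch of configuring an incremental Python model with an explicit strategy (the `unique_key` column is hypothetical, and which strategies are available depends on your adapter):
+
+```python
+def model(dbt, session):
+    dbt.config(
+        materialized="incremental",
+        incremental_strategy="merge",  # availability varies by adapter
+        unique_key="id"                # hypothetical grain column
+    )
+    ...
+```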
+ +For incremental models, like SQL models, you will need to filter incoming tables to only new rows of data: + + + +
+ + + +```python +import snowflake.snowpark.functions as F + +def model(dbt, session): + dbt.config(materialized = "incremental") + df = dbt.ref("upstream_table") + + if dbt.is_incremental: + + # only new rows compared to max in current table + max_from_this = f"select max(updated_at) from {dbt.this}" + df = df.filter(df.updated_at >= session.sql(max_from_this).collect()[0][0]) + + # or only rows from the past 3 days + df = df.filter(df.updated_at >= F.dateadd("day", F.lit(-3), F.current_timestamp())) + + ... + + return df +``` + + + +
+ +
+ + + +```python +import pyspark.sql.functions as F + +def model(dbt, session): + dbt.config(materialized = "incremental") + df = dbt.ref("upstream_table") + + if dbt.is_incremental: + + # only new rows compared to max in current table + max_from_this = f"select max(updated_at) from {dbt.this}" + df = df.filter(df.updated_at >= session.sql(max_from_this).collect()[0][0]) + + # or only rows from the past 3 days + df = df.filter(df.updated_at >= F.date_add(F.current_timestamp(), F.lit(-3))) + + ... + + return df +``` + + + +
+ +
+
+**Note:** Incremental models are supported on BigQuery/Dataproc for the `merge` incremental strategy. The `insert_overwrite` strategy is not yet supported.
+
+## Python-specific functionality
+
+### Defining functions
+
+In addition to defining a `model` function, the Python model can import other functions or define its own. Here's an example, on Snowpark, defining a custom `add_one` function:
+
+```python
+def add_one(x):
+    return x + 1
+
+def model(dbt, session):
+    dbt.config(materialized="table")
+    temps_df = dbt.ref("temperatures")
+
+    # warm things up just a little
+    df = temps_df.withColumn("degree_plus_one", add_one(temps_df["degree"]))
+    return df
+```
+
+At present, Python functions defined in one dbt model can't be imported and reused in other models. See the ["Code reuse"](#code-reuse) section for the potential patterns we're considering.
+
+### Using PyPI packages
+
+You can also define functions that depend on third-party packages, so long as those packages are installed and available to the Python runtime on your data platform. See notes on "Installing Packages" for [specific data platforms](#specific-data-platforms).
+
+In this example, we use the `holidays` package to determine if a given date is a holiday in France. For simplicity and consistency across platforms, the code below uses the pandas API. The exact syntax, and the need to refactor for multi-node processing, still varies.
+ + + +```python +import holidays + +def is_holiday(date_col): + # Chez Jaffle + french_holidays = holidays.France() + is_holiday = (date_col in french_holidays) + return is_holiday + +def model(dbt, session): + dbt.config( + materialized = "table", + packages = ["holidays"] + ) + + orders_df = dbt.ref("stg_orders") + + df = orders_df.to_pandas() + + # apply our function + # (columns need to be in uppercase on Snowpark) + df["IS_HOLIDAY"] = df["ORDER_DATE"].apply(is_holiday) + + # return final dataset (Pandas DataFrame) + return df +``` + + + +
+ +
+ + + +```python +import holidays + +def is_holiday(date_col): + # Chez Jaffle + french_holidays = holidays.France() + is_holiday = (date_col in french_holidays) + return is_holiday + +def model(dbt, session): + dbt.config( + materialized = "table", + packages = ["holidays"] + ) + + orders_df = dbt.ref("stg_orders") + + df = orders_df.to_pandas_on_spark() # Spark 3.2+ + # df = orders_df.toPandas() in earlier versions + + # apply our function + df["is_holiday"] = df["order_date"].apply(is_holiday) + + # convert back to PySpark + df = df.to_spark() # Spark 3.2+ + # df = session.createDataFrame(df) in earlier versions + + # return final dataset (PySpark DataFrame) + return df +``` + + + +
+ +
+ +#### Configuring packages + +We encourage you to explicitly configure required packages and versions so dbt can track them in project metadata. This configuration is required for the implementation on some platforms. If you need specific versions of packages, specify them. + + + +```python +def model(dbt, session): + dbt.config( + packages = ["numpy==1.23.1", "scikit-learn"] + ) +``` + + + + + +```yml +version: 2 + +models: + - name: my_python_model + config: + packages: + - "numpy==1.23.1" + - scikit-learn +``` + + + +#### UDFs + +You can use the `@udf` decorator or `udf` function to define an "anonymous" function and call it within your `model` function's DataFrame transformation. This is a typical pattern for applying more complex functions as DataFrame operations, especially if those functions require inputs from third-party packages. +- [Snowpark Python: Creating UDFs](https://docs.snowflake.com/en/developer-guide/snowpark/python/creating-udfs.html) +- [PySpark functions: udf](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.udf.html) + + + +
+ + + +```python +import snowflake.snowpark.types as T +import snowflake.snowpark.functions as F +import numpy + +def register_udf_add_random(): + add_random = F.udf( + # use 'lambda' syntax, for simple functional behavior + lambda x: x + numpy.random.normal(), + return_type=T.FloatType(), + input_types=[T.FloatType()] + ) + return add_random + +def model(dbt, session): + + dbt.config( + materialized = "table", + packages = ["numpy"] + ) + + temps_df = dbt.ref("temperatures") + + add_random = register_udf_add_random() + + # warm things up, who knows by how much + df = temps_df.withColumn("degree_plus_random", add_random("degree")) + return df +``` + + + +**Note:** Due to a Snowpark limitation, it is not currently possible to register complex named UDFs within stored procedures, and therefore dbt Python models. We are looking to add native support for Python UDFs as a project/DAG resource type in a future release. For the time being, if you want to create a "vectorized" Python UDF via the Batch API, we recommend either: +- Writing [`create function`](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-batch.html) inside a SQL macro, to run as a hook or run-operation +- [Registering from a staged file](https://docs.snowflake.com/ko/developer-guide/snowpark/reference/python/_autosummary/snowflake.snowpark.udf.html#snowflake.snowpark.udf.UDFRegistration.register_from_file) within your Python model code + +
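+
+As a rough sketch of that second workaround, assuming a file `add_one.py` defining `add_one()` has already been uploaded to a (hypothetical) stage named `my_stage`:
+
+```python
+import snowflake.snowpark.types as T
+
+def model(dbt, session):
+    dbt.config(materialized="table")
+
+    # register a UDF from a Python file previously staged in Snowflake
+    add_one = session.udf.register_from_file(
+        file_path="@my_stage/add_one.py",
+        func_name="add_one",
+        return_type=T.IntegerType(),
+        input_types=[T.IntegerType()]
+    )
+
+    df = dbt.ref("numbers")
+    return df.withColumn("n_plus_one", add_one(df["n"]))
+```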
+ +
+
+```python
+import pyspark.sql.types as T
+import pyspark.sql.functions as F
+import numpy
+
+# use a 'decorator' for more readable code
+@F.udf(returnType=T.DoubleType())
+def add_random(x):
+    random_number = numpy.random.normal()
+    return x + random_number
+
+def model(dbt, session):
+    dbt.config(
+        materialized = "table",
+        packages = ["numpy"]
+    )
+
+    temps_df = dbt.ref("temperatures")
+
+    # warm things up, who knows by how much
+    df = temps_df.withColumn("degree_plus_random", add_random("degree"))
+    return df
+```
+
+ +
+
+#### Code reuse
+
+Currently, Python functions defined in one dbt model can't be imported and reused in other models. This is something we'd like dbt to support. There are two patterns we're considering:
+1. Creating and registering **"named" UDFs**. This process is different across data platforms and has some performance limitations. (Snowpark does support ["vectorized" UDFs](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-batch.html): pandas-like functions that you can execute in parallel.)
+2. Using **private Python packages**. In addition to importing reusable functions from public PyPI packages, many data platforms support uploading custom Python assets and registering them as packages. The upload process looks different across platforms, but your code’s actual `import` looks the same.
+
+:::note ❓ Our questions
+
+- Should dbt have a role in abstracting over UDFs? Should dbt support a new type of DAG node, `function`? Would the primary use case be code reuse across Python models or defining Python-language functions that can be called from SQL models?
+- How can dbt help users when uploading or initializing private Python assets? Is this a new form of `dbt deps`?
+- How can dbt support users who want to test custom functions? If defined as UDFs: "unit testing" in the database? If "pure" functions in packages: encourage adoption of `pytest`?
+
+💬 Discussion: ["Python models: package, artifact/object storage, and UDF management in dbt"](https://github.com/dbt-labs/dbt-core/discussions/5741)
+:::
+
+### DataFrame API and syntax
+
+Over the past decade, most people writing data transformations in Python have adopted DataFrame as their common abstraction. dbt follows this convention by returning `ref()` and `source()` as DataFrames, and it expects all Python models to return a DataFrame.
+
+A DataFrame is a two-dimensional data structure (rows and columns). It supports convenient methods for transforming that data and creating new columns from calculations performed on existing columns. It also offers convenient ways for previewing data while developing locally or in a notebook.
+
+That's about where the agreement ends. There are numerous frameworks with their own syntaxes and APIs for DataFrames. The [pandas](https://pandas.pydata.org/docs/) library offered one of the original DataFrame APIs, and its syntax is the most common to learn for new data professionals. Most newer DataFrame APIs are compatible with pandas-style syntax, though few can offer perfect interoperability. This is true for Snowpark and PySpark, which have their own DataFrame APIs.
+
+When developing a Python model, you will find yourself asking these questions:
+
+**Why pandas?** It's the most common API for DataFrames. It makes it easy to explore sampled data and develop transformations locally. You can “promote” your code as-is into dbt models and run it in production for small datasets.
+
+**Why _not_ pandas?** Performance. pandas runs "single-node" transformations, which cannot benefit from the parallelism and distributed computing offered by modern data warehouses. This quickly becomes a problem as you operate on larger datasets. Some data platforms support optimizations for code written using pandas' DataFrame API, preventing the need for major refactors. For example, ["pandas on PySpark"](https://spark.apache.org/docs/latest/api/python/getting_started/quickstart_ps.html) offers support for 95% of pandas functionality, using the same API while still leveraging parallel processing.
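+
+To see how far the syntaxes diverge, here's the same (hypothetical) monthly-revenue rollup written twice: once in pandas-style syntax and once with the native PySpark DataFrame API:
+
+```python
+import pyspark.sql.functions as F
+
+# pandas-style: concise, but runs single-node
+def monthly_revenue_pandas(orders_df):
+    orders_df["order_month"] = orders_df["order_date"].astype(str).str.slice(0, 7)
+    return orders_df.groupby("order_month", as_index=False)["amount"].sum()
+
+# native PySpark: more verbose, but runs distributed
+def monthly_revenue_spark(orders_df):
+    return (
+        orders_df
+        .withColumn("order_month", F.date_format("order_date", "yyyy-MM"))
+        .groupBy("order_month")
+        .agg(F.sum("amount").alias("amount"))
+    )
+```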
+
+:::note ❓ Our questions
+- When developing a new dbt Python model, should we recommend pandas-style syntax for rapid iteration and then refactor?
+- Which open source libraries provide compelling abstractions across different data engines and vendor-specific APIs?
+- Should dbt attempt to play a longer-term role in standardizing across them?
+
+💬 Discussion: ["Python models: the pandas problem (and a possible solution)"](https://github.com/dbt-labs/dbt-core/discussions/5738)
+:::
+
+## Limitations
+
+Python models have capabilities that SQL models do not. They also have some drawbacks compared to SQL models:
+
+- **Time and cost.** Python models are slower to run than SQL models, and the cloud resources that run them can be more expensive. Running Python requires more general-purpose compute. That compute might sometimes live on a separate service or architecture from your SQL models. **However:** We believe that deploying Python models via dbt—with unified lineage, testing, and documentation—is, from a human standpoint, **dramatically** faster and cheaper. By comparison, spinning up separate infrastructure to orchestrate Python transformations in production and different tooling to integrate with dbt is much more time-consuming and expensive.
+- **Syntax differences** are even more pronounced. Over the years, dbt has done a lot, via dispatch patterns and packages such as `dbt_utils`, to abstract over differences in SQL dialects across popular data warehouses. Python offers a **much** wider field of play. If there are five ways to do something in SQL, there are 500 ways to write it in Python, all with varying performance and adherence to standards. Those options can be overwhelming. As the maintainers of dbt, we will be learning from state-of-the-art projects tackling this problem and sharing guidance as we develop it.
+- **These capabilities are very new.** As data warehouses develop new features, we expect them to offer cheaper, faster, and more intuitive mechanisms for deploying Python transformations. **We reserve the right to change the underlying implementation for executing Python models in future releases.** Our commitment to you is around the code in your model `.py` files, following the documented capabilities and guidance we're providing here.
+
+As a general rule, if there's a transformation you could write equally well in SQL or Python, we believe that well-written SQL is preferable: it's more accessible to a greater number of colleagues, and it's easier to write code that's performant at scale. If there's a transformation you _can't_ write in SQL, or where ten lines of elegant and well-annotated Python could save you 1000 lines of hard-to-read Jinja-SQL, Python is the way to go.
+
+## Specific data platforms
+
+In their initial launch, Python models are supported on three of the most popular data platforms: Snowflake, Databricks, and BigQuery/GCP (via Dataproc). Both Databricks and GCP's Dataproc use PySpark as the processing framework. Snowflake uses its own framework, Snowpark, which has many similarities to PySpark.
+
+ +**Additional setup:** You will need to [acknowledge and accept Snowflake Third Party Terms](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-packages.html#getting-started) to use Anaconda packages. + +**Installing packages:** Snowpark supports several popular packages via Anaconda. The complete list is at https://repo.anaconda.com/pkgs/snowflake/. Packages are installed at the time your model is being run. Different models can have different package dependencies. If you are using third-party packages, Snowflake recommends using a dedicated virtual warehouse for best performance rather than one with many concurrent users. + +**About "sprocs":** dbt submits Python models to run as "stored procedures," which some people call "sprocs" for short. By default, dbt will create a named sproc containing your model's compiled Python code, and then "call" it to execute. Snowpark has a Private Preview feature for "temporary" or "anonymous" stored procedures ([docs](https://docs.snowflake.com/en/LIMITEDACCESS/call-with.html)), which are faster and leave a cleaner query history. If this feature is enabled for your account, you can switch it on for your models by configuring `use_anonymous_sproc: True`. We plan to switch this on for all dbt + Snowpark Python models in a future release. + + + +```yml +# I asked Snowflake Support to enable this Private Preview feature, +# and now my dbt-py models run even faster! +models: + use_anonymous_sproc: True +``` + + + +**Docs:** ["Developer Guide: Snowpark Python"](https://docs.snowflake.com/en/developer-guide/snowpark/python/index.html) + +
+ +
+
+**Submission methods:** Databricks supports a few different mechanisms to submit PySpark code, each with relative advantages. Some are better for supporting iterative development, while others are better for supporting lower-cost production deployments. The options are:
+- `all_purpose_cluster` (default): dbt will run your Python model using the cluster ID configured as `cluster` in your connection profile or for this specific model. These clusters are more expensive but also much more responsive. We recommend using an interactive all-purpose cluster for quicker iteration in development.
+  - `create_notebook: True`: dbt will upload your model's compiled PySpark code to a notebook in the namespace `/Shared/dbt_python_model/{schema}`, where `{schema}` is the configured schema for the model, and execute that notebook to run using the all-purpose cluster. The appeal of this approach is that you can easily open the notebook in the Databricks UI for debugging or fine-tuning right after running your model. Remember to copy any changes into your dbt `.py` model code before re-running.
+  - `create_notebook: False` (default): dbt will use the [Command API](https://docs.databricks.com/dev-tools/api/1.2/index.html#run-a-command), which is slightly faster.
+- `job_cluster`: dbt will upload your model's compiled PySpark code to a notebook in the namespace `/Shared/dbt_python_model/{schema}`, where `{schema}` is the configured schema for the model, and execute that notebook to run using a short-lived jobs cluster. For each Python model, Databricks will need to spin up the cluster, execute the model's PySpark transformation, and then spin down the cluster. As such, job clusters take longer before and after model execution, but they're also less expensive, so we recommend these for longer-running Python models in production. To use the `job_cluster` submission method, your model must be configured with `job_cluster_config`, which defines key-value properties for `new_cluster`, as defined in the [JobRunsSubmit API](https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit).
+
+You can configure each model's `submission_method` in all the standard ways you supply configuration:
+
+```python
+def model(dbt, session):
+    dbt.config(
+        submission_method="all_purpose_cluster",
+        create_notebook=True,
+        cluster_id="abcd-1234-wxyz"
+    )
+    ...
+```
+```yml
+version: 2
+models:
+  - name: my_python_model
+    config:
+      submission_method: job_cluster
+      job_cluster_config:
+        spark_version: ...
+        node_type_id: ...
+```
+```yml
+# dbt_project.yml
+models:
+  project_name:
+    subfolder:
+      # set defaults for all .py models defined in this subfolder
+      +submission_method: all_purpose_cluster
+      +create_notebook: False
+      +cluster_id: abcd-1234-wxyz
+```
+
+If not configured, `dbt-spark` will use the built-in defaults: the all-purpose cluster (based on `cluster` in your connection profile) without creating a notebook. The `dbt-databricks` adapter will default to the cluster configured in `http_path`. We encourage explicitly configuring the clusters for Python models in Databricks projects.
+
+**Installing packages:** When using all-purpose clusters, we recommend installing the packages your Python models use directly on the cluster, ahead of time.
+ +**Docs:** +- [PySpark DataFrame syntax](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.html) +- [Databricks: Introduction to DataFrames - Python](https://docs.databricks.com/spark/latest/dataframes-datasets/introduction-to-dataframes-python.html) + +
+ +
+
+The `dbt-bigquery` adapter uses a service called Dataproc to submit your Python models as PySpark jobs. That Python/PySpark code will read from your tables and views in BigQuery, perform all computation in Dataproc, and write the final result back to BigQuery.
+
+**Submission methods.** Dataproc supports two submission methods: `serverless` and `cluster`. Dataproc Serverless does not require a ready cluster, which saves on hassle and cost, but it is slower to start up and much more limited in terms of available configuration. For example, Dataproc Serverless supports only a small set of Python packages, though it does include `pandas`, `numpy`, and `scikit-learn`. (See the full list [here](https://cloud.google.com/dataproc-serverless/docs/guides/custom-containers#example_custom_container_image_build), under "The following packages are installed in the default image".) By contrast, creating a Dataproc cluster in advance lets you fine-tune the cluster's configuration, install any PyPI packages you want, and benefit from faster, more responsive runtimes.
+
+Use the `cluster` submission method with dedicated Dataproc clusters you or your organization manage. Use the `serverless` submission method to avoid managing a Spark cluster. The latter may be quicker for getting started, but both are valid for production.
+
+**Additional setup:**
+- Create or use an existing [Cloud Storage bucket](https://cloud.google.com/storage/docs/creating-buckets)
+- Enable Dataproc APIs for your project + region
+- If using the `cluster` submission method: Create or use an existing [Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) with the [Spark BigQuery connector initialization action](https://github.com/GoogleCloudDataproc/initialization-actions/tree/master/connectors#bigquery-connectors). (Google recommends copying the action into your own Cloud Storage bucket, rather than using the example version shown in the screenshot below.)
+
+The following configurations are needed to run Python models on Dataproc. You can add these to your [BigQuery profile](/reference/warehouse-setups/bigquery-setup#running-python-models-on-dataproc), or configure them on specific Python models:
+- `gcs_bucket`: Storage bucket to which dbt will upload your model's compiled PySpark code.
+- `dataproc_region`: GCP region in which you have enabled Dataproc (for example, `us-central1`).
+- `dataproc_cluster_name`: Name of the Dataproc cluster to use for running Python models (executing PySpark jobs). Only required if `submission_method: cluster`.
+
+```python
+def model(dbt, session):
+    dbt.config(
+        submission_method="cluster",
+        dataproc_cluster_name="my-favorite-cluster"
+    )
+    ...
+```
+```yml
+version: 2
+models:
+  - name: my_python_model
+    config:
+      submission_method: serverless
+```
+
+Any user or service account that runs dbt Python models will need the following permissions, in addition to permissions needed for BigQuery ([docs](https://cloud.google.com/dataproc/docs/concepts/iam/iam)):
+```
+dataproc.clusters.use
+dataproc.jobs.create
+dataproc.jobs.get
+dataproc.operations.get
+storage.buckets.get
+storage.objects.create
+storage.objects.delete
+```
+
+**Installing packages:** If you are using a Dataproc cluster (as opposed to Dataproc Serverless), you can add third-party packages while creating the cluster.
+ +Google recommends installing Python packages on Dataproc clusters via initialization actions: +- [How initialization actions are used](https://github.com/GoogleCloudDataproc/initialization-actions/blob/master/README.md#how-initialization-actions-are-used) +- [Actions for installing via `pip` or `conda`](https://github.com/GoogleCloudDataproc/initialization-actions/tree/master/python) + +You can also install packages at cluster creation time by [defining cluster properties](https://cloud.google.com/dataproc/docs/tutorials/python-configuration#image_version_20): `dataproc:pip.packages` or `dataproc:conda.packages`. + + + +**Docs:** +- [Dataproc overview](https://cloud.google.com/dataproc/docs/concepts/overview) +- [PySpark DataFrame syntax](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.html) + +
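+To illustrate the cluster-properties route above, here is a sketch of a cluster creation command that preinstalls PyPI packages via `dataproc:pip.packages`. The cluster name, region, and package pins are assumptions; adjust them for your project:
+
+```bash
+# the ^#^ prefix changes gcloud's key/value pair delimiter to "#",
+# so the comma-separated package list is parsed as a single value
+gcloud dataproc clusters create my-favorite-cluster \
+  --region us-central1 \
+  --properties '^#^dataproc:pip.packages=holidays==0.16,pandas-gbq==0.17.9'
+```
+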
+ +
+ +
diff --git a/website/docs/docs/building-a-dbt-project/metrics.md b/website/docs/docs/building-a-dbt-project/metrics.md
deleted file mode 100644
index 3ca76eb0c63..00000000000
--- a/website/docs/docs/building-a-dbt-project/metrics.md
+++ /dev/null
@@ -1,139 +0,0 @@
----
-title: "Metrics"
-id: "metrics"
-description: "When you define metrics in dbt projects, you encode crucial business logic in tested, version-controlled code. The dbt metrics layer helps you standardize metrics within your organization."
-keywords:
-  - dbt metrics layer
----
-
-
-* **v1.0.0**: Metrics are new and experimental
-
-
-:::info Metrics are new
-v1.0.0 includes an initial version of metrics, following a [vibrant community discussion](https://github.com/dbt-labs/dbt-core/issues/4071). Try them out, and let us know what you think!
-:::
-
-:::caution Metrics are experimental
-v1.0 includes metrics, but they should be considered an _unstable_ API because they are experimental and subject to change. We reserve the right to make breaking changes to the metrics schema in future **minor** versions, but will aim for backwards compatibility when possible.
-:::
-
-## About Metrics
-
-A metric is a timeseries aggregation over a table that supports zero or more dimensions. Some examples of metrics include:
-- active users
-- mrr (monthly recurring revenue)
-
-In v1.0, dbt supports metric definitions as a new node type. Like [exposures](exposures), metrics participate in the dbt DAG and can be expressed in YAML files. By defining metrics in dbt projects, you encode crucial business logic in tested, version-controlled code. Further, you can expose these metrics definitions to downstream tooling, which drives consistency and precision in metric reporting.
-
-### Benefits of defining metrics
-
-**Use metric specifications in downstream tools**
-dbt's compilation context can access metrics via the [`graph.metrics` variable](graph). The [manifest artifact](manifest-json) includes metrics for downstream metadata consumption.
-
-**See and select dependencies**
-As with Exposures, you can see everything that rolls up into a metric (`dbt ls -s +metric:*`), and visualize them in [dbt documentation](documentation). For more information, see "[The `metric:` selection method](node-selection/methods#the-metric-method)."
-
-## Declaring a metric
-
-You can define metrics in `.yml` files nested under a `metrics:` key.
-
-```yaml
-# models/marts/product/schema.yml
-
-version: 2
-
-models:
-  - name: dim_customers
-    ...
-
-metrics:
-  - name: new_customers
-    label: New Customers
-    model: ref('dim_customers')
-    description: "The number of paid customers using the product"
-
-    type: count
-    sql: user_id # superfluous here, but shown as an example
-
-    timestamp: signup_date
-    time_grains: [day, week, month]
-
-    dimensions:
-      - plan
-      - country
-
-    filters:
-      - field: is_paying
-        operator: 'is'
-        value: 'true'
-      - field: lifetime_value
-        operator: '>='
-        value: '100'
-      - field: company_name
-        operator: '!='
-        value: "'Acme, Inc'"
-      - field: signup_date
-        operator: '>='
-        value: "'2020-01-01'"
-
-    meta: {team: Finance}
-```
-
-### Available properties
-
-| Field       | Description                                                  | Example                         | Required? |
-|-------------|--------------------------------------------------------------|---------------------------------|-----------|
-| name        | A unique identifier for the metric                           | new_customers                   | yes       |
-| model       | The dbt model that powers this metric                        | dim_customers                   | yes       |
-| label       | A short name / label for the metric                          | New Customers                   | no        |
-| description | Long form, human-readable description for the metric         | The number of customers who.... | no        |
-| type        | The type of calculation to perform when evaluating a metric  | count_distinct                  | yes       |
-| sql         | The expression to aggregate/calculate over                   | user_id                         | yes       |
-| timestamp   | The time-based component of the metric                       | signup_date                     | yes       |
-| time_grains | One or more "grains" at which the metric can be evaluated    | [day, week, month]              | yes       |
-| dimensions  | A list of dimensions to group or filter the metric by        | [plan, country]                 | no        |
-| filters     | A list of filters to apply before calculating the metric     | See below                       | no        |
-| meta        | Arbitrary key/value store                                    | {team: Finance}                 | no        |
-
-### Filters
-Filters should be defined as a list of dictionaries that define predicates for the metric. Filters are combined using AND clauses. For more control, users can (and should) include the complex logic in the model powering the metric.
-
-All three properties (`field`, `operator`, `value`) are required for each defined filter.
-
-Note that `value` must be defined as a string in YAML, because it will be compiled into queries as part of a string. If your filter's value needs to be surrounded in quotes inside the query (e.g. text or dates), use `"'nested'"` quotes:
-
-```yml
-  filters:
-    - field: is_paying
-      operator: 'is'
-      value: 'true'
-    - field: lifetime_value
-      operator: '>='
-      value: '100'
-    - field: company_name
-      operator: '!='
-      value: "'Acme, Inc'"
-    - field: signup_date
-      operator: '>='
-      value: "'2020-01-01'"
-```
-
-## Ongoing discussions
-
-- Should metrics be defined on top of more strongly typed **attributes**, rather than columns? [dbt-core#4090](https://github.com/dbt-labs/dbt-core/issues/4090)
-- Should metrics include support for joins? How should dbt know about foreign-key relationships between models? [dbt-core#4125](https://github.com/dbt-labs/dbt-core/issues/4125)
-- Should metrics inherit configurations from the models on which they are defined? Should it be possible to define metrics directly on models/columns, like tests?
-
-These are just a start! We welcome you to check out open issues on GitHub, and join the conversation.
-
diff --git a/website/docs/docs/building-a-dbt-project/projects.md b/website/docs/docs/building-a-dbt-project/projects.md
deleted file mode 100644
index 8ae94adf047..00000000000
--- a/website/docs/docs/building-a-dbt-project/projects.md
+++ /dev/null
@@ -1,94 +0,0 @@
----
-title: "Projects"
-id: "projects"
----
-
-## Related reference docs
-* [`dbt_project.yml` configurations](reference/dbt_project.yml.md)
-* The [`dbt init` command](init)
-
-## Getting started
-A dbt project is a directory of `.sql` and `.yml` files, which dbt uses to transform your data. At a minimum, a dbt project must contain:
-* A project file: A `dbt_project.yml` file tells dbt that a particular directory is a dbt project, and also contains configurations for your project.
-* [Models](building-models): A model is a single `.sql` file. Each model contains a single select statement that either transforms raw data into a dataset that is ready for analytics, or, more often, is an intermediate step in such a transformation.
- -A project may also contain a number of other resources, such as [snapshots](snapshots), [seeds](seeds), [tests](building-a-dbt-project/tests), [macros](jinja-macros#macros), [documentation](documentation), and [sources](using-sources). - -## Creating a dbt project - -:::info Creating your first dbt project - -If you're new to dbt, we recommend that you check out our [Getting Started Tutorial](tutorial/getting-started.md) to build your first dbt project. - -::: - -If you don't yet have a dbt project, follow these instructions to create one. The dbt [starter project](https://github.com/dbt-labs/dbt-starter-project) contains default configurations as well as helpful notes. - - - - - -To create a new dbt project when developing in dbt Cloud: - -1. Create a dbt Cloud account [here](https://cloud.getdbt.com/signup/). If your organization already has a dbt Cloud account, ask an admin to add you as a Developer. -2. If you created a new account, a new project should automatically be created. If you were added to an existing account: - * Click the hamburger menu, then `Account Settings`, then `Projects`. - * Name your project, and click `Save`. There's no need to fill in the other details. - * Click the hamburger menu, and then `Home`. - * Switch the project in the header bar to your new "dbt Tutorial" project. -3. Complete the project setup flow: - * Connect to your data warehouse - * Add a repository — either choose a managed repository, or connect to an existing, but bare, repository. - - - -4. Go to the `Develop` interface by either: - * Selecting `Start Developing`, or - * Selecting the hamburger menu, and then `Develop`. -5. Select `Initialize a project` to create your project. You should see a directory structure with `.sql` and `.yml` files that were generated by the `init` command. - - - - -To create a new dbt project, run: - -```bash -$ dbt init [project_name] -``` - -This will create a new directory in your current path (i.e. at `./[project-name]`. - - - - - -### FAQs - - - - - -## Using an existing project - - - -If your organization already has a dbt Cloud account, and you wish to develop your project using dbt Cloud, ask an admin to add you as a Developer. - - -To work on an existing project: -1. Ensure that the project is checked into a hosted git repository (for example, on GitHub, GitLab or BitBucket) that you have access to. -2. Clone the repository to your computer. - - diff --git a/website/docs/docs/collaborate/cloud-build-and-view-your-docs.md b/website/docs/docs/collaborate/cloud-build-and-view-your-docs.md new file mode 100644 index 00000000000..d8fb539feca --- /dev/null +++ b/website/docs/docs/collaborate/cloud-build-and-view-your-docs.md @@ -0,0 +1,55 @@ +--- +title: "Build and view your docs with dbt Cloud" +id: "build-and-view-your-docs" +description: "Automatically generate project documentation as you run jobs." +--- + +dbt enables you to generate documentation for your project and data warehouse, and renders the documentation in a website. For more information, see [Documentation](/docs/collaborate/documentation). + +## Set up a documentation job + +You can set up documentation for a job in dbt Cloud when you edit your job settings or create a new job. You need to configure the job to generate docs when it runs, then link that job to your project. + +To set up a job to generate docs: + +1. In the top left, click **Deploy** and select **Jobs**. +2. Create a new job or select an existing job and click **Settings**. +3. 
Under "Execution Settings," select **Generate docs on run**.
+
+4. Click **Save**. Proceed to [configure project documentation](#configure-project-documentation) so your project generates the documentation when this job runs.
+
+:::tip Tip — Documentation-only jobs
+
+To create and schedule documentation-only jobs at the end of your production jobs, add the `dbt compile` command in the **Commands** section.
+
+:::
+
+## Configure project documentation
+
+You configure project documentation so that it generates when the job you set up in the previous section runs. In the project settings, specify the job that generates documentation artifacts for that project. Once you configure this setting, subsequent runs of the job will automatically include a step to generate documentation.
+
+1. Click the gear icon in the top right.
+2. Select **Projects** and click the project that needs documentation.
+3. Click **Edit**.
+4. Under "Artifacts," select the job that should generate docs when it runs.
+5. Click **Save**.
+
+## Generating documentation
+
+To generate documentation in the IDE, run the `dbt docs generate` command in the Command Bar. This command generates the docs for your dbt project as it exists in development in your IDE session.
+
+After generating your documentation, you can click the **Book** icon above the file tree to see the latest version of your documentation rendered in a new browser window.
+
+## Viewing documentation
+
+Once you set up a job to generate documentation for your project, you can click **Documentation** in the top left. Your project's documentation should open. This link will always navigate you to the most recent version of your project's documentation in dbt Cloud.
+
+The dbt Cloud IDE makes it possible to view [documentation](/docs/collaborate/documentation) for your dbt project while your code is still in development. With this workflow, you can inspect and verify what your project's generated documentation will look like before your changes are released to production.
+
diff --git a/website/docs/docs/building-a-dbt-project/documentation.md b/website/docs/docs/collaborate/documentation.md
similarity index 78%
rename from website/docs/docs/building-a-dbt-project/documentation.md
rename to website/docs/docs/collaborate/documentation.md
index 64af40883a6..dc9a3a6c848 100644
--- a/website/docs/docs/building-a-dbt-project/documentation.md
+++ b/website/docs/docs/collaborate/documentation.md
@@ -1,15 +1,18 @@
 ---
-title: "Documentation"
+title: "About documentation"
 id: "documentation"
 ---
 
 ## Related documentation
+
 * [Declaring properties](configs-and-properties)
 * [`dbt docs` command](cmd-docs)
-* [`doc` Jinja function](dbt-jinja-functions/doc)
+* [`doc` Jinja function](/reference/dbt-jinja-functions)
+* If you're new to dbt, we recommend that you check out our [Getting Started Tutorial](/docs/get-started/getting-started/overview) to build your first dbt project, complete with documentation.
 
 ## Assumed knowledge
-* [Tests](building-a-dbt-project/tests)
+
+* [Tests](/docs/build/tests)
 
 ## Overview
 
@@ -17,21 +20,16 @@ Good documentation for your dbt models will help downstream consumers discover a
 dbt provides a way to generate documentation for your dbt project and render it as a website. The documentation for your project includes:
 * **Information about your project**: including model code, a DAG of your project, any tests you've added to a column, and more.
-* **Information about your data warehouse**: including column data types, and sizes. This information is generated by running queries against the information schema.
+* **Information about your data warehouse**: including column data types and sizes. This information is generated by running queries against the information schema.
+Importantly, dbt also provides a way to add **descriptions** to models, columns, sources, and more, to further enhance your documentation.
 
-Here's a screenshot of an example docs site (you can find the whole site [here](https://www.getdbt.com/mrr-playbook/#!/overview)):
+Here's an example docs site:
 
-Importantly, dbt also provides a way to add **descriptions** to models, columns, sources, and more, to further enhance your documentation.
-
-:::info Creating documentation for the first time
-If you're new to dbt, we recommend that you check out our [Getting Started Tutorial](setting-up) to build your first dbt project, complete with documentation.
-:::
-
 ## Adding descriptions to your project
-To add descriptions to your project, use the `description:` key in the same files where you declare [tests](building-a-dbt-project/tests), like so:
+To add descriptions to your project, use the `description:` key in the same files where you declare [tests](/docs/build/tests), like so:
 
@@ -68,12 +66,12 @@ First, run `dbt docs generate` — this command tells dbt to compile relevant in
 Then, run `dbt docs serve` to use these `.json` files to populate a local website.
 
 ## FAQs
-
-
-
-
-
-
+
+
+
+
+
+
 
 ## Using Docs Blocks
 
@@ -185,7 +183,7 @@ From a docs page, you can click the green button in the bottom-right corner of t
 
-In this example, the `fct_subscription_transactions` model only has one direct parent. By clicking the "Expand" button in the top-right corner of the window, we can pivot the graph horizontally and view the full lineage for our model. This lineage is filterable using the `--select` and `--exclude` flags, which are consistent with the semantics of [model selection syntax](node-selection/syntax). Further, you can right-click to interact with the DAG, jump to documentation, or share links to your graph visualization with your coworkers.
+In this example, the `fct_subscription_transactions` model only has one direct parent. By clicking the "Expand" button in the top-right corner of the window, we can pivot the graph horizontally and view the full lineage for our model. This lineage is filterable using the `--select` and `--exclude` flags, which are consistent with the semantics of [model selection syntax](node-selection/syntax). Further, you can right-click to interact with the DAG, jump to documentation, or share links to your graph visualization with your coworkers.
 
@@ -197,9 +195,9 @@ The `dbt docs serve` command is only intended for local/development hosting of t
 :::
 
-dbt's documentation website was built in a way that makes it easy to host on the web. The site itself is "static", meaning that you don't need any type of "dynamic" server to serve the docs. Some common methods for hosting the docs are:
+dbt's documentation website was built to make it easy to host on the web. The site is "static", meaning you don't need any "dynamic" servers to serve the docs. You can host your documentation in several ways:
 
-1. [dbt Cloud](cloud-generating-documentation)
-2.
[Host on S3](https://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteHosting.html) (optionally [with IP access restrictions](https://docs.aws.amazon.com/AmazonS3/latest/dev/example-bucket-policies.html#example-bucket-policies-use-case-3)) -3. [Publish on Netlify](https://discourse.getdbt.com/t/publishing-dbt-docs-to-netlify/121) -4. Spin up a web server like Apache/Nginx +* Use [dbt Cloud](/docs/collaborate/documentation) +* Host on [Amazon S3](https://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteHosting.html) (optionally [with IP access restrictions](https://docs.aws.amazon.com/AmazonS3/latest/dev/example-bucket-policies.html#example-bucket-policies-use-case-3)) +* Publish with [Netlify](https://discourse.getdbt.com/t/publishing-dbt-docs-to-netlify/121) +* Use your own web server like Apache/Nginx diff --git a/website/docs/docs/guides/managing-environments.md b/website/docs/docs/collaborate/environments.md similarity index 82% rename from website/docs/docs/guides/managing-environments.md rename to website/docs/docs/collaborate/environments.md index f3d9035e3c7..c611056c9e1 100644 --- a/website/docs/docs/guides/managing-environments.md +++ b/website/docs/docs/collaborate/environments.md @@ -1,6 +1,6 @@ --- -title: "Managing environments" -id: "managing-environments" +title: "Environments" +id: "environments" --- ## What are environments? @@ -10,17 +10,21 @@ In software engineering, environments are used to enable engineers to develop an In traditional software engineering, different environments often use completely separate architecture. For example, the dev and prod versions of a website may use different servers and databases. -Data warehouses can also be designed to have separate environments – the _production_ environment refers to the relations (i.e. schemas, tables, and views) that your end users query (often through a BI tool). +Data warehouses can also be designed to have separate environments – the _production_ environment refers to the relations (i.e. schemas, tables, and views) that your end users query (often through a BI tool). ## How do I maintain different environments with dbt? dbt makes it easy to maintain separate production and development environments through the use of targets within a profile. A typical profile when using dbt locally (i.e. running from your command line) will have a target named `dev`, and have this set as the default. This means that while making changes, your objects will be built in your _development_ target, without affecting production queries made by your end users. Once you are confident in your changes, you can deploy the code to _production_, by running your dbt project with a _prod_ target. :::info Running dbt in production -You can learn more about different ways to run dbt in production in [this article](running-dbt-in-production) +You can learn more about different ways to run dbt in production in [this article](/docs/deploy/deployments). ::: Targets offer the flexibility to decide how to implement your separate environments – whether you want to use separate schemas, databases, or entirely different clusters altogether! We recommend using _different schemas within one data warehouse_ to separate your environments. This is the easiest to set up, and is the most cost effective solution in a modern cloud-based data stack. In practice, this means that most of the details in a target will be consistent across all targets, except for the `schema` and user credentials. 
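+For example, a profile with `dev` and `prod` targets that differ only in schema and credentials might look like the following sketch. The profile name, hosts, usernames, and schemas are assumptions; swap in your own:
+
+```yml
+jaffle_shop:
+  target: dev                # default target used during local development
+  outputs:
+    dev:
+      type: postgres
+      host: localhost
+      user: alice            # hypothetical per-developer user
+      password: "{{ env_var('DBT_ENV_SECRET_DEV_PASSWORD') }}"
+      port: 5432
+      dbname: analytics
+      schema: dbt_alice      # developer-specific schema
+      threads: 4
+    prod:
+      type: postgres
+      host: db.prod.internal # hypothetical production host
+      user: dbt_prod
+      password: "{{ env_var('DBT_ENV_SECRET_PROD_PASSWORD') }}"
+      port: 5432
+      dbname: analytics
+      schema: analytics      # the schema your end users query
+      threads: 8
+```
+
+With this in place, `dbt run` builds into `dbt_alice` by default, and `dbt run --target prod` deploys to the production schema.
+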
If you have multiple dbt users writing code, it often makes sense for _each user_ to have their own _development_ environment. A pattern we've found useful is to set your dev target schema to be `dbt_<username>`. User credentials should also differ across targets so that each dbt user is using their own data warehouse user.
+
+## Related docs
+- [About dbt Core versions](/docs/dbt-versions/core)
+- [Upgrade Core version in Cloud](/docs/dbt-versions/upgrade-core-in-cloud)
diff --git a/website/docs/docs/collaborate/git-version-control.md b/website/docs/docs/collaborate/git-version-control.md
new file mode 100644
index 00000000000..7006b8836f2
--- /dev/null
+++ b/website/docs/docs/collaborate/git-version-control.md
@@ -0,0 +1,24 @@
+---
+title: "About git"
+id: git-version-control
+description: "Git overview"
+sidebar_label: "About git"
+---
+
+A [version control](https://en.wikipedia.org/wiki/Version_control) system allows you and your teammates to work collaboratively, safely, and simultaneously on a single project. Version control helps you track all the code changes made in your dbt project.
+
+In a distributed version control system, every developer has a full copy of the project and project history. Git is one of the most popular distributed version control systems and is commonly used for both open source and commercial software development, with great benefits for individuals, teams, and businesses.
+
+![Git overview](https://docs.getdbt.com/img/docs/dbt-cloud/cloud-ide/git-overview.png)
+
+Git allows developers to see the entire timeline of their changes, decisions, and progression of any project in one place. From the moment they access the history of a project, the developer has all the context they need to understand it and start contributing.
+
+When you develop in the command line interface (CLI) or Cloud integrated development environment (IDE), you can leverage Git directly to version control your code. To use version control, make sure you are connected to a Git repository in the CLI or Cloud IDE.
+
+## Related docs
+- [Version control basics](/docs/collaborate/git/version-control-basics)
+- [Resolve merge conflicts](/docs/collaborate/git/resolve-merge-conflicts)
+- [Connect to GitHub](/docs/collaborate/git/connect-github)
+- [Connect to GitLab](/docs/collaborate/git/connect-gitlab)
diff --git a/website/docs/docs/collaborate/git/authenticate-azure.md b/website/docs/docs/collaborate/git/authenticate-azure.md
new file mode 100644
index 00000000000..abac4fd1b59
--- /dev/null
+++ b/website/docs/docs/collaborate/git/authenticate-azure.md
@@ -0,0 +1,24 @@
+---
+title: "Authenticate with Azure DevOps"
+id: "authenticate-azure"
+description: "dbt Cloud developers need to authenticate with Azure DevOps."
+sidebar_label: "Authenticate with Azure DevOps"
+---
+
+If you use the dbt Cloud IDE to collaborate on your team's Azure DevOps dbt repo, you need to [link your dbt Cloud profile to Azure DevOps](#link-your-dbt-cloud-profile-to-azure-devops), which provides an extra layer of authentication.
+
+## Link your dbt Cloud profile to Azure DevOps
+
+Connect your dbt Cloud profile to Azure DevOps using OAuth:
+
+1. Click the gear icon at the top right and select **Profile settings**.
+2. Click **Linked Accounts**.
+3. Next to Azure DevOps, click **Link**.
+
+4. Once you're redirected to Azure DevOps, sign into your account.
+5. When you see the permission request screen from the Azure DevOps app, click **Accept**.
+
+You will be directed back to dbt Cloud, and your profile should be linked.
You are now ready to develop in dbt Cloud! diff --git a/website/docs/docs/collaborate/git/connect-azure-devops.md b/website/docs/docs/collaborate/git/connect-azure-devops.md new file mode 100644 index 00000000000..22ecd12bbbf --- /dev/null +++ b/website/docs/docs/collaborate/git/connect-azure-devops.md @@ -0,0 +1,25 @@ +--- +title: "Connect to Azure DevOps" +id: "connect-azure-devops" +--- + + + + +## About Azure DevOps and dbt Cloud + +Connect your Azure DevOps cloud account in dbt Cloud to unlock new product experiences: + +- Import new Azure DevOps repos with a couple clicks during dbt Cloud project setup. +- Clone repos using HTTPS rather than SSH +- Enforce user authorization with OAuth 2.0. +- Carry Azure DevOps user repository permissions (read / write access) through to dbt Cloud IDE's git actions. +- Trigger Continuous integration (CI) builds when pull requests are opened in Azure DevOps. + + +To connect Azure DevOps in dbt Cloud: + +1. An account admin needs to [set up an Active Directory application and add it to dbt Cloud](/docs/collaborate/git/setup-azure). +2. dbt Cloud developers need to [personally authenticate with Azure DevOps](/docs/collaborate/git/authenticate-azure) from dbt Cloud. + + diff --git a/website/docs/docs/collaborate/git/connect-github.md b/website/docs/docs/collaborate/git/connect-github.md new file mode 100644 index 00000000000..74b416908d0 --- /dev/null +++ b/website/docs/docs/collaborate/git/connect-github.md @@ -0,0 +1,56 @@ +--- +title: "Connecting your GitHub account" +id: "connect-github" +sidebar_label: "Connecting GitHub" +--- + +## Overview + +Connecting your GitHub account to dbt Cloud provides convenience and another layer of security to dbt Cloud: +- Log into dbt Cloud using OAuth through GitHub. +- Import new GitHub repositories with a couple clicks during dbt Cloud project setup. +- Clone repos using HTTPS rather than SSH. +- Trigger [Continuous integration](/docs/deploy/cloud-ci-job) builds when pull requests are opened in GitHub. + + +To connect GitHub in dbt Cloud: +1. A GitHub organization owner must first [install the dbt Cloud application](/docs/collaborate/git/connect-github#installing-dbt-cloud-in-your-github-account) in your team's GitHub account. +2. All other dbt Cloud developers on the account need to [personally authenticate with GitHub](/docs/collaborate/git/connect-github#personally-authenticate-with-github) from dbt Cloud. + +If you are the GitHub organization owner tasked with the installation of the dbt Cloud app in step 1, you will also be automatically personally authenticated after completion, so step 2 will be taken care of. This means teams of one only need to complete step 1. + +:::info Use GitHub On-Premise? +If you're using an On-Premises GitHub deployment, this method will not work for your account. Please instead reference our docs on [importing a project by git URL](/docs/collaborate/git/import-a-project-by-git-url) to set up your connection. This alternative connection method does not include the benefits of the native integration mentioned above. +::: + +## Installing dbt Cloud in your GitHub account + +A GitHub organization owner needs to connect and configure the dbt Cloud app for their GitHub organization. If you are a team of one or wish to connect your personal GitHub account, then these instructions also apply to you. + +To link your dbt Cloud account to your team's GitHub account, navigate to Your Profile settings by clicking the gear icon in the top right. Select **Linked Accounts** from the left menu. 
+ + + +In the Linked Accounts section, you can set up your GitHub account connection to dbt Cloud by clicking **Link** to the right of GitHub. This redirects you to your account on GitHub where you will be asked to install and configure the dbt Cloud application. Select the GitHub organization and repositories dbt Cloud should access. + + + +The dbt Cloud GitHub App requires the following permissions: +- Read access to metadata +- Read and write access to checks, code, commit statuses, pull requests, and workflows + +Once you grant access to the app, you will be redirected back to dbt Cloud and shown a linked account success state. You are now personally authenticated too, and your team members can begin [connecting their profiles](/docs/collaborate/git/connect-github#personally-authenticate-with-github). + +## Configuring the dbt Cloud app in your GitHub account +If you are your GitHub organization owner, you can also configure the dbt Cloud GitHub application to have access to only select repositories. This configuration must be done in GitHub, but we provide an easy link in dbt Cloud to start this process. + + +## Personally authenticate with GitHub +dbt Cloud developers on the Enterprise plan must each connect their GitHub profiles to dbt Cloud, as every developer's read / write access for the dbt repo is checked in the dbt Cloud IDE. dbt Cloud developers on the Team plan do not need to each connect their profiles to GitHub, but it is still recommended to do so. + +To connect a personal GitHub account, dbt Cloud developers should navigate to Your Profile settings by clicking the gear icon in the top right, then select **Linked Accounts** in the left menu. + +If your GitHub account is not connected, you’ll see "No connected account". Select **Link** to begin the setup process. You’ll be redirected to GitHub, and asked to authorize dbt Cloud in a grant screen. + + +Once you approve authorization, you will be redirected to dbt Cloud, and you should now see your connected account. The next time you log into dbt Cloud, you will be able to do so via OAuth through GitHub, and if you're on the Enterprise plan, you're ready to use the dbt Cloud IDE. diff --git a/website/docs/docs/collaborate/git/connect-gitlab.md b/website/docs/docs/collaborate/git/connect-gitlab.md new file mode 100644 index 00000000000..3ff5d6f4e56 --- /dev/null +++ b/website/docs/docs/collaborate/git/connect-gitlab.md @@ -0,0 +1,119 @@ +--- +title: "Connect to GitLab" +id: "connect-gitlab" +--- + +## Overview + +Connecting your GitLab account to dbt Cloud provides convenience and another layer of security to dbt Cloud: +- Import new GitLab repos with a couple clicks during dbt Cloud project setup. +- Clone repos using HTTPS rather than SSH. +- Carry GitLab user permissions through to dbt Cloud IDE's git actions. +- Trigger [Continuous integration](/docs/deploy/cloud-ci-job) builds when merge requests are opened in GitLab. + +The steps to integrate GitLab in dbt Cloud depend on your plan. If you are on: +- the Developer or Team plan, read these [instructions](#for-dbt-cloud-developer-and-team-tiers). +- the Enterprise plan, jump ahead to these [instructions](#for-the-dbt-cloud-enterprise-tier). + +## For dbt Cloud Developer and Team tiers + +To connect your GitLab account: +1. Navigate to Your Profile settings by clicking the gear icon in the top right. +2. Select **Linked Accounts** in the left menu. +3. Click **Link** to the right of your GitLab account. 
+ + + +When you click **Link**, you will be redirected to GitLab and prompted to sign into your account. GitLab will then ask for your explicit authorization: + + + +Once you've accepted, you should be redirected back to dbt Cloud, and you'll see that your account has been linked to your profile. + + +## For the dbt Cloud Enterprise tier + +dbt Cloud enterprise customers have the added benefit of bringing their own GitLab OAuth application to dbt Cloud. This tier benefits from extra security, as dbt Cloud will: +- Enforce user authorization with OAuth. +- Carry GitLab's user repository permissions (read / write access) through to dbt Cloud IDE's git actions. + +In order to connect GitLab in dbt Cloud, a GitLab account admin must: +1. [Set up a GitLab OAuth application](#setting-up-a-gitlab-oauth-application). +2. [Add the GitLab app to dbt Cloud](#adding-the-gitlab-oauth-application-to-dbt-cloud). + +Once the admin completes those steps, dbt Cloud developers need to: +1. [Personally authenticate with GitLab](#personally-authenticating-with-gitlab) from dbt Cloud. + + +### Setting up a GitLab OAuth application +We recommend that before you set up a project in dbt Cloud, a GitLab account admin set up an OAuth application in GitLab for use in dbt Cloud. + +For more detail, GitLab has a [guide for creating a Group Application](https://docs.gitlab.com/ee/integration/oauth_provider.html#group-owned-applications). + +In GitLab, navigate to your group settings and select **Applications**. Here you'll see a form to create a new application. + + + +In GitLab, when creating your Group Application, input the following: + +| Field | Value | +| ------ | ----- | +| **Name** | dbt Cloud | +| **Redirect URI** | https://cloud.getdbt.com/complete/gitlab | +| **Confidential** | ✔️ | +| **Scopes** | ✔️ api | + +dbt Cloud single tenant customers need to replace **cloud.getdbt.com** with the hostname of +your dbt Cloud instance. + +The application form in GitLab should look as follows when completed: + + + +Click **Save application** in GitLab, and GitLab will then generate an **Application ID** and **Secret**. These values will be available even if you close the app screen, so this is not the only chance you have to save them. + +### Adding the GitLab OAuth application to dbt Cloud +After you've created your GitLab application, you need to provide dbt Cloud information about the app. In dbt Cloud, account admins should navigate to **Account Settings**, click on the **Integrations** tab, and expand the GitLab section. + + + +In dbt Cloud, input the following values: + +| Field | Value | +| ------ | ----- | +| **GitLab Instance** | https://gitlab.com | +| **Application ID** | *copy value from GitLab app* | +| **Secret** | *copy value from GitLab app* | + +Note, if you have a special hosted version of GitLab, modify the **GitLab Instance** to use the hostname provided for your organization instead - for example `https://gitlab.yourgreatcompany.com/`. + +Once the form is complete in dbt Cloud, click **Save**. + +You will then be redirected to GitLab and prompted to sign into your account. GitLab will ask for your explicit authorization: + + + +Once you've accepted, you should be redirected back to dbt Cloud, and your integration is ready for developers on your team to [personally authenticate with](#personally-authenticating-with-gitlab). 
+ +### Personally authenticating with GitLab +dbt Cloud developers on the Enterprise plan must each connect their GitLab profiles to dbt Cloud, as every developer's read / write access for the dbt repo is checked in the dbt Cloud IDE. + +To connect a personal GitLab account, dbt Cloud developers should navigate to Your Profile settings by clicking the gear icon in the top right, then select **Linked Accounts** in the left menu. + +If your GitLab account is not connected, you’ll see "No connected account". Select **Link** to begin the setup process. You’ll be redirected to GitLab, and asked to authorize dbt Cloud in a grant screen. + + + +Once you approve authorization, you will be redirected to dbt Cloud, and you should see your connected account. You're now ready to start developing in the dbt Cloud IDE. + + +## Troubleshooting + +### Errors when importing a repository on dbt Cloud project set up +If you do not see your repository listed, double-check that: +- Your repository is in a Gitlab group you have access to. dbt Cloud will not read repos associated with a user. + +If you do see your repository listed, but are unable to import the repository successfully, double-check that: +- You are a maintainer of that repository. Only users with maintainer permissions can set up repository connections. + +If you imported a repository using the dbt Cloud native integration with GitLab, you should be able to see the clone strategy is using a `deploy_token`. If it's relying on an SSH key, this means the repository was not set up using the native GitLab integration, but rather using the generic git clone option. The repository must be reconnected in order to get the benefits described above. diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url.md b/website/docs/docs/collaborate/git/import-a-project-by-git-url.md similarity index 87% rename from website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url.md rename to website/docs/docs/collaborate/git/import-a-project-by-git-url.md index 38d3510d536..5d09e336ccd 100644 --- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url.md +++ b/website/docs/docs/collaborate/git/import-a-project-by-git-url.md @@ -1,6 +1,6 @@ --- -title: "Importing a project by git URL" -id: "cloud-import-a-project-by-git-url" +title: "Import a project by git URL" +id: "import-a-project-by-git-url" --- In dbt Cloud, you can import a git repository from any valid git URL that points to a dbt project. There are a couple of important considerations to keep in mind when doing this: @@ -17,7 +17,7 @@ After importing a project by Git URL, dbt Cloud will generate a Deploy Key for y :::info Use GitHub? -If you use GitHub, you can import your repo directly using [dbt Cloud's GitHub Application](cloud-installing-the-github-application). Connecting your repo via the GitHub Application [enables Continuous Integration](cloud-enabling-continuous-integration-with-github). +If you use GitHub, you can import your repo directly using [dbt Cloud's GitHub Application](/docs/collaborate/git/connect-github). Connecting your repo via the GitHub Application [enables Continuous Integration](/docs/deploy/cloud-ci-job). ::: @@ -31,7 +31,7 @@ See also: [Adding a deploy key in GitHub](https://github.blog/2015-06-16-read-o :::info Use GitLab? -If you use GitLab, you can import your repo directly using [dbt Cloud's GitLab Application](connecting-gitlab). 
Connecting your repo via the GitLab Application [enables Continuous Integration](cloud-enabling-continuous-integration-with-github).
+If you use GitLab, you can import your repo directly using [dbt Cloud's GitLab Application](/docs/collaborate/git/connect-gitlab). Connecting your repo via the GitLab Application [enables Continuous Integration](/docs/deploy/cloud-ci-job).
 
 :::
 
@@ -63,6 +63,8 @@ Open the AWS CodeCommit console and choose your repository. Copy the SSH URL fro
 In the newly created Repository details page, you'll see a "Deploy Key" field. Copy the contents of this field as you'll need it for step 3.
 
+**Note:** The dbt Cloud-generated public key is the only key that will work in the next step. Any key generated outside of dbt Cloud will not work.
+
 #### Step 3: Grant dbt Cloud AWS User access
 
 Open up the newly created dbt Cloud user in the AWS IAM Console. Choose the "Security Credentials" tab and then click "Upload SSH public key". Paste in the contents of the "Public Key" field from the dbt Cloud Repository page.
 
@@ -72,6 +74,11 @@ Once you've created the key, you'll see an "SSH key ID" for it. You'll need to w
 You're all set! Once support handles your request, your project is set up and you can begin executing dbt runs from dbt Cloud.
 
 ### Azure DevOps
+:::info Use Azure DevOps?
+
+If you use Azure DevOps and you are on the dbt Cloud Enterprise plan, you can import your repo directly using [dbt Cloud's Azure DevOps Integration](/docs/collaborate/git/connect-azure-devops). Connecting your repo via the Azure DevOps Application [enables Continuous Integration](/docs/deploy/cloud-ci-job).
+
+:::
 
 To add a deploy key to an Azure DevOps account, navigate to the "SSH public keys" page in the User Settings of your user's Azure DevOps account or a service user's account. We recommend using a dedicated service user for the integration to ensure that dbt Cloud's connection to Azure DevOps is not interrupted by changes to user permissions.
diff --git a/website/docs/docs/collaborate/git/managed-repository.md b/website/docs/docs/collaborate/git/managed-repository.md
new file mode 100644
index 00000000000..d7beb38c4f5
--- /dev/null
+++ b/website/docs/docs/collaborate/git/managed-repository.md
@@ -0,0 +1,20 @@
+---
+title: "Managed repository"
+id: "managed-repository"
+---
+
+If you do not already have a git repository for your dbt project, you can let dbt Cloud manage a repository for you. Managed repositories are a great way to trial dbt without needing to create a new repository.
+
+To set up a project with a managed repository:
+
+1. From your Account settings in dbt Cloud, select the project you want to set up with a managed repository. If the project already has a repository set up, you need to edit the repository settings and disconnect the existing repository.
+2. Click **Edit** for the project.
+3. Under Repository, click **Configure repository**.
+4. Select **Managed**.
+5. Enter a name for the repository. For example, "analytics" or "dbt-models."
+6. Click **Create**.
+
+dbt Cloud will host and manage this repository for you. If in the future you choose to host this repository yourself, you can contact support to have the contents of your repo transferred to you.
+
+**Note:** We do not recommend using a managed repository in a production environment. You will not be able to use git features like pull requests, which are part of our recommended version control best practices.
diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories.md b/website/docs/docs/collaborate/git/pr-template.md
similarity index 72%
rename from website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories.md
rename to website/docs/docs/collaborate/git/pr-template.md
index b2e35671550..b6a9493ee27 100644
--- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories.md
+++ b/website/docs/docs/collaborate/git/pr-template.md
@@ -1,9 +1,9 @@
 ---
-title: "Configuring repositories"
-id: "cloud-configuring-repositories"
+title: "PR template"
+id: "pr-template"
 ---
 
-### Configuring PR template URLs
+## Configure pull request (PR) template URLs
 
 When changes are committed on a branch in the IDE, dbt Cloud can prompt users to open a new Pull Request for the code changes. To enable this functionality, ensure
 that a PR Template URL is configured in the Repository details page in your
 Account Settings. **Note:** If this template URL is unset, then the IDE will instead show a prompt to merge the changes directly into the `master` branch.
 
-
+
 The PR template URL supports two variables which can be used to build a URL
 string. These variables, `{{source}}` and `{{destination}}`, return branch names based on the
@@ -44,10 +44,18 @@ https://github.com/dbt-labs/jaffle_shop/compare/master..my-branch
 
+## Configure custom branches
+
+By default in Development Environments, dbt Cloud attempts to reference the `main` branch in connected repositories. If you want to use a different default branch name, you can configure dbt Cloud with a custom branch setting.
+
+For example, you can use the `develop` branch of a connected repository. Edit an environment, then in "General settings" select **Only run on a custom branch**, and in "Custom branch" type **develop** or the name of your custom branch.
+
+
 ## Example templates
 
 Some common URL templates are provided below, but please note that the exact
-value may vary depending on your configured git provider.
+value may vary depending on your configured git provider.
 
 ### GitHub
 ```
It's currently named `number_of_orders` but your stakeholder feels like this could be named better.
 
-You and a teammate make changes to the column name, but have gone two separate routes. You rename this column to `total_number_of_orders` and your teammate has renamed it `historical_order_count`.
+You and a teammate make changes to the column name, but have gone two separate routes. You rename this column to `total_number_of_orders` and your teammate has renamed it `historical_order_count`. Your teammate has committed their change first, so you encounter a merge conflict when you press commit.
diff --git a/website/docs/docs/collaborate/git/setup-azure.md b/website/docs/docs/collaborate/git/setup-azure.md
new file mode 100644
index 00000000000..a4aa73b6aef
--- /dev/null
+++ b/website/docs/docs/collaborate/git/setup-azure.md
@@ -0,0 +1,292 @@
+---
+title: "Set up Azure DevOps"
+id: "setup-azure"
+description: "You can set up your Azure DevOps by creating an Azure AD app and adding it to dbt Cloud."
+sidebar_label: "Set up Azure DevOps"
+---
+
+## Overview
+
+To use our native integration with Azure DevOps in dbt Cloud, an account admin needs to set up an Azure Active Directory (Azure AD) app. We recommend setting up a separate Azure AD application from the one [used for SSO](/docs/collaborate/manage-access/set-up-sso-azure-active-directory).
+
+1. [Register an Azure AD app](#register-an-azure-ad-app).
+2. [Add permissions to your new app](#add-permissions-to-your-new-app).
+3. [Add another redirect URI](#add-another-redirect-uri).
+4. [Connect Azure DevOps to your new app](#connect-azure-devops-to-your-new-app).
+5. [Add your Azure AD app to dbt Cloud](#add-your-azure-ad-app-to-dbt-cloud).
+
+Once the Azure AD app is added to dbt Cloud, an account admin must also connect a service user via OAuth, which will be used to power headless actions in dbt Cloud such as deployment runs and CI.
+1. [Connecting a Service User](#connecting-a-service-user).
+
+Once the Azure AD app is added to dbt Cloud and the service user is connected, dbt Cloud developers can personally authenticate in dbt Cloud from Azure DevOps. For more on this, see [Authenticate with Azure DevOps](/docs/collaborate/git/authenticate-azure).
+
+## Register an Azure AD app
+
+1. Sign into your Azure portal and click **Azure Active Directory** under Azure services.
+2. Select **App registrations** in the left panel.
+3. Select **New registration**. The form for creating a new Active Directory app opens.
+4. Provide a name for your app. We recommend using "dbt Labs Azure DevOps App".
+5. Select **Accounts in any organizational directory (Any Azure AD directory - Multitenant)** as the Supported Account Types.
+Many customers ask why they need to select Multitenant instead of Single tenant, and they frequently get this step wrong. Microsoft considers Azure DevOps (formerly called Visual Studio) and Azure Active Directory as separate tenants, and in order for this Active Directory application to work properly, you must select Multitenant.
+6. Add a redirect URI by selecting **Web** and typing in `https://cloud.getdbt.com/complete/azure_active_directory`. If you have a custom dbt Cloud URL, be sure to use the appropriate domain.
+7. Click **Register**.
+
+Here's what your app should look like before you register it:
+
+## Add permissions to your new app
+
+Provide your new app access to Azure DevOps:
+
+1. Select **API permissions** in the left navigation panel.
+2. Remove the **Microsoft Graph / User Read** permission.
+3.
Click **Add a permission**. +4. Select **Azure DevOps**. +5. Select the **user_impersonation** permission. This is the only permission available for Azure DevOps. + + + +## Add another redirect URI + +You also need to add another redirect URI to your Azure AD application. This redirect URI will be used to authenticate the service user for headless actions in deployment environments. + +1. Navigate to your Azure AD application. +2. Select the link next to **Redirect URIs** +3. Click **Add URI** and add the URI, making sure to use the appropriate domain if you have a custom dbt Cloud URL: +`https://cloud.getdbt.com/complete/azure_active_directory_service_user` +4. Click **Save**. + + + + + + +## Connect Azure DevOps to your new app +If you have already connected your Azure DevOps account to Active Directory, then you can proceed to [Connecting a service user](#connecting-a-service-user). However, if you're just getting set up, connect Azure DevOps to the Active Directory App you just created: + +1. From your Azure DevOps account, select **Organization settings** in the bottom left. +2. Navigate to Azure Active Directory. +3. Click **Connect directory**. +4. Select the directory you want to connect. +5. Click **Connect**. + + + +## Add your Azure AD app to dbt Cloud + +Once you connect your Azure AD app and Azure DevOps, you need to provide dbt Cloud information about the app: + +1. Navigate to your account settings in dbt Cloud. +2. Select **Integrations**. +3. Scroll to the Azure DevOps section. +4. Complete the form: + - **Azure DevOps Organization:** Must match the name of your Azure DevOps organization exactly. Do not include the `dev.azure.com/` prefix in this field. ✅ Use `my-devops-org` ❌ Avoid `dev.azure.com/my-devops-org` + - **Application (client) ID:** Found in the Azure AD App. + - **Client Secrets:** You need to first create a secret in the Azure AD App under **Client credentials**. Make sure to copy the **Value** field in the Azure AD App and paste it in the **Client Secret** field in dbt Cloud. You are responsible for the Azure AD app secret expiration and rotation. + - **Directory(tenant) ID:** Found in the Azure AD App. + + +Your Azure AD app should now be added to your dbt Cloud Account. People on your team who want to develop in dbt Cloud's IDE can now personally [authorize Azure DevOps from their profiles](/docs/collaborate/git/authenticate-azure). + +## Connecting a service user + +Because Azure DevOps forces all authentication to be linked to a user's permissions, we recommend you create a "service user" in Azure DevOps whose permissions will be used to power headless actions in dbt Cloud such as dbt Cloud project repo selection, deployment runs, and CI. A service user is a pseudo user set up in the same way an admin would set up a real user, but it's given permissions specifically scoped for service to service interactions. You should avoid linking authentication to a real Azure DevOps user because if this person leaves your organization, dbt Cloud will lose privileges to the dbt Azure DevOps repositories, causing production runs to fail. + +### More on Service Users + +A service user account must have the following Azure DevOps permissions for all Azure DevOps projects and repos you want accessible in dbt Cloud. Read more about how dbt Cloud uses each permission in the following paragraphs. 
+
+ - **Project Reader**
+ - **ViewSubscriptions**
+ - **EditSubscriptions**
+ - **DeleteSubscriptions** *
+ - **PullRequestContribute**
+ - **GenericContribute**
+
+\* Note: The **DeleteSubscriptions** permission might be included in **EditSubscriptions**, depending on your version of Azure.
+
+Some of these permissions are only accessible via the Azure DevOps API, for which documentation can be found [here](https://docs.microsoft.com/en-us/azure/devops/organizations/security/namespace-reference?view=azure-devops). We've also detailed more information on Azure DevOps API usage below to help accelerate setup. Alternatively, you can use the Azure DevOps UI to enable permissions, but it does not let you grant the most narrowly scoped (least-privilege) set.
+
+:::info Provide the service user with required permissions before setting up a dbt Cloud project
+This service user's permissions will also power which repositories a team can select from during dbt project setup, so an Azure DevOps admin must grant at minimum Project Reader access to the service user before setting up a project in dbt Cloud.
+:::
+ ViewSubscriptions +

+ +**Security Namespace ID:** cb594ebe-87dd-4fc9-ac2c-6a10a4c92046 + +**Namespace:** ServiceHooks + +**Permission:** +```json +{ + "bit": 1, + "displayName": "View Subscriptions", + "name": "ViewSubscriptions" +} +``` + +**Uses:** To view existing Azure DevOps service hooks subscriptions + +**Token (where applicable - API only):** +- PublisherSecurity for access to all projects +- PublisherSecurity/ for per project access + +**UI/API:** API only + +
+ +
+ EditSubscriptions +

+ +**Security Namespace ID:** cb594ebe-87dd-4fc9-ac2c-6a10a4c92046 + +**Namespace:** ServiceHooks + +**Permission:** +```json +{ + "bit": 2, + "displayName": "Edit Subscription", + "name": "EditSubscriptions" +} + +``` + +**Uses:** To add or update existing Azure DevOps service hooks subscriptions + +**Token (where applicable - API only):** +- PublisherSecurity for access to all projects +- PublisherSecurity/ for per project access + +**UI/API:** API only + +
+ +
+ DeleteSubscriptions +

+ +**Security Namespace ID:** cb594ebe-87dd-4fc9-ac2c-6a10a4c92046 + +**Namespace:** ServiceHooks + +**Permission:** +```json +{ + "bit": 4, + "displayName": "Delete Subscriptions", + "name": "DeleteSubscriptions" +} +``` + +**Uses:** To delete any redundant Azure DevOps service hooks subscriptions + + +**Token (where applicable - API only):** +- PublisherSecurity for access to all projects +- PublisherSecurity/ for per project access + +**UI/API:** API only + +**Additional Notes:** This permission is deprecated in recent Azure DevOps versions; **EditSubscriptions** (bit 2) now includes delete permissions. + +
+ +
+ PullRequestContribute +

+ +**Security Namespace ID:** 2e9eb7ed-3c0a-47d4-87c1-0ffdd275fd87 + +**Namespace:** Git Repositories + +**Permission:** +```json +{ + "bit": 16384, + "displayName": "Contribute to pull requests", + "name": "PullRequestContribute" +} + +``` + +**Uses:** To post Pull Request statuses to Azure DevOps + + +**Token (where applicable - API only):** +- repoV2 for access to all projects +- repoV2/ for per project access +- repoV2// for per repo access + + +**UI/API:** UI and API + +**Additional Notes:** This permission is automatically inherited if Project Reader/Contributor/Administrator is set in the UI. + + +
+ +
+ GenericContribute +

+ +**Security Namespace ID:** 2e9eb7ed-3c0a-47d4-87c1-0ffdd275fd87 + +**Namespace:** Git Repositories + +**Permission:** +```json +{ + "bit": 4, + "displayName": "Contribute", + "name": "GenericContribute" +} + + +``` + +**Uses:** To post commit statuses to Azure DevOps + + +**Token (where applicable - API only):** +- repoV2 for access to all projects +- repoV2/ for access to a single project at a time +- repoV2// for access to a single repo at a time + + +**UI/API:** UI and API + +**Additional Notes:** This permission is automatically inherited if Project Contributor/Administrator is set in the UI. + + +
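After assigning these permissions, you can confirm they took effect by reading the access control lists back from the same security namespaces. A minimal sketch, again in Python with `requests` and placeholder organization and PAT values:

```python
import requests

ORG = "my-devops-org"  # placeholder organization name
PAT = "<personal-access-token>"
GIT_REPOS_NAMESPACE = "2e9eb7ed-3c0a-47d4-87c1-0ffdd275fd87"  # Git Repositories namespace ID

resp = requests.get(
    f"https://dev.azure.com/{ORG}/_apis/accesscontrollists/{GIT_REPOS_NAMESPACE}",
    params={"token": "repoV2", "api-version": "6.0"},
    auth=("", PAT),
)
resp.raise_for_status()

# Each ACL maps an identity descriptor to allow/deny bitmasks; check that the
# service user's entry carries GenericContribute (4) and PullRequestContribute (16384).
for acl in resp.json()["value"]:
    for descriptor, ace in acl.get("acesDictionary", {}).items():
        can_contribute = bool(ace["allow"] & 4)
        can_pr_contribute = bool(ace["allow"] & 16384)
        print(descriptor, can_contribute, can_pr_contribute)
```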
+ +You must connect your service user before setting up a dbt Cloud project, as the service user's permissions determine which projects dbt Cloud can import. + +To connect the service user: +1. An admin must first be signed into the service user's Azure DevOps account. +2. The admin should click **Link Azure Service User** in dbt Cloud. +3. The admin will be directed to Azure DevOps and must accept the Azure AD app's permissions. +4. Finally, the admin will be redirected to dbt Cloud, and the service user will be connected. + + +Once connected, dbt Cloud displays the email address of the service user so you know which user's permissions are enabling headless actions in deployment environments. To change which account is connected, disconnect the profile in dbt Cloud, sign into the alternative Azure DevOps service account, and re-link the account in dbt Cloud. + +:::info Service user authentication expiration +dbt Cloud will refresh the authentication for the service user on each run triggered by the scheduler, API, or CI. If your account does not have any active runs for over 90 days, an admin will need to manually refresh the service user's authentication by disconnecting and reconnecting its profile via the OAuth flow described above. This restores headless interactions like project setup, deployment runs, and CI. + +::: diff --git a/website/docs/docs/collaborate/git/version-control-basics.md b/website/docs/docs/collaborate/git/version-control-basics.md new file mode 100644 index 00000000000..dc304d99ca6 --- /dev/null +++ b/website/docs/docs/collaborate/git/version-control-basics.md @@ -0,0 +1,62 @@ +--- +title: "Version control basics" +id: version-control-basics +description: "Leverage Git to version control your code." +sidebar_label: "Version control basics" +--- + +# Version control basics + +When you develop in the command line interface (CLI) or Cloud integrated development environment (IDE), you can leverage Git directly to version control your code. To use version control, make sure you are connected to a Git repository in the CLI or Cloud IDE. + +You can create a separate branch to develop and make changes. The changes you make aren’t merged into the main branch unless they successfully pass tests. This helps keep the code organized and improves productivity by making the development process smooth. + +You can read more about git terminology below, and check out [GitHub Docs](https://docs.github.com/en) as well. + +## Git overview + +Check out some common git terms below that you might encounter when developing: + +| Name | Definition | +| --- | --- | +| Repository or repo | A repository is a directory that stores all the files, folders, and content needed for your project. You can think of this as an object database of the project, storing everything from the files themselves to the versions of those files, commits, and deletions. Repositories are not limited by user and can be shared and copied. | +| Branch | A branch is a parallel version of a repository. It is contained within the repository but does not affect the primary or main branch, allowing you to work freely without disrupting the live version. When you've made the changes you want to make, you can merge your branch back into the main branch to publish your changes. | +| Checkout | The `checkout` command is used to create a new branch, change your current working branch to a different branch, or switch to a different version of a file from a different branch.
| +| Commit | A commit is a user’s change to a file (or set of files). When you make a commit to save your work, Git creates a unique ID that allows you to keep a record of the specific changes committed along with who made them and when. Commits usually contain a commit message which is a brief description of what changes were made. | +| main | The primary, base branch of all repositories. All committed and accepted changes should be on the main branch. In the Cloud IDE, the main branch is read-only. This is because any changes/edits to code cannot and should not be made directly in the base branch. A new branch should be created in order to make any changes to your project. | +| Merge | Merge takes the changes from one branch and adds them into another (usually main) branch. These commits are usually first requested via pull request before being merged by a maintainer. | +| Pull Request | If someone has changed code on a separate branch of a project and wants it to be reviewed to add to the main branch, they can submit a pull request. Pull requests ask the repo maintainers to review the commits made, and then, if acceptable, merge the changes upstream. Merging the pull request adds the changes to the main branch. | +| Push | A `push` updates a remote branch with the commits made to the current branch. You are literally _pushing_ your changes into the remote. | +| Remote | This is the version of a repository or branch that is hosted on a server. Remote versions can be connected to local clones so that changes can be synced. | + + +## The git button in the Cloud IDE + +You can perform git tasks with the git button in the Cloud IDE. The following are descriptions of each git button command and what they do: + + +| Name | Actions | +| --- | --- | +| Abort merge | This option allows you to cancel a merge that had conflicts. Be careful with this action because all changes will be reset and this operation can't be reverted, so make sure to commit or save all your changes before you start a merge. | +| Change branch | This option allows you to change between branches (checkout). | +| Commit | A commit is an individual change to a file (or set of files). When you make a commit to save your work, Git creates a unique ID (a.k.a. the "SHA" or "hash") that allows you to keep a record of the specific changes committed along with who made them and when. Commits usually contain a commit message which is a brief description of what changes were made. When you make changes to your code in the future, you'll need to commit them as well. | +| Create new branch | This allows you to branch off of your base branch and edit your project. You’ll notice after initializing your project that the main branch will be read-only. This is because any changes to code cannot and should not be made directly in the base branch. A new branch should be created in order to make any changes to your project. | +| Initialize your project | This is done when first setting up your project. Initializing a project creates all required directories and files within an empty repository by using the dbt starter project.

Note: This option will not display if your repo isn't completely empty (i.e. includes a README file).

Once you click **Initialize your project**, click **Commit** to finish setting up your project. | +| Open pull request | This allows you to open a pull request in Git for peers to review changes before merging into the base branch. | +| Pull changes from master/main | This option is available if you are on any local branch that is behind the remote version of the base branch or the remote version of the branch that you're currently on. | +| Pull from remote | This option is available if you’re on the local base branch and changes have recently been pushed to the remote version of the branch. Pulling in changes from the remote repo allows you to pull in the most recent version of the base branch. | +| Reclone Your Repository | Reclone your repository directly from the Cloud IDE. You can reset your repository back to a fresh clone from your remote. To do this, click on the bottom right-hand side green **Ready** text, then click **Reclone Repo**. | +| Refresh git state | This enables you to pull new branches from a different remote branch to your local branch with just one command. | + + +## Merge conflicts + +Merge conflicts often occur when multiple users are concurrently making edits to the same section in the same file. This makes it difficult for Git to determine which change should be kept. + +Refer to [resolve merge conflicts](/docs/collaborate/git/resolve-merge-conflicts) to learn how to resolve merge conflicts. + +## The .gitignore file + +dbt Labs recommends that you exclude files so they're not tracked by Git and won't slow down your dbt project. + +You can do this with a special file named [.gitignore](https://github.com/dbt-labs/dbt-starter-project/blob/main/.gitignore) which is automatically included in your dbt project after you initialize it in dbt Cloud. The `.gitignore` file must be placed at the root of your dbt project. diff --git a/website/docs/docs/collaborate/manage-access/about-access.md b/website/docs/docs/collaborate/manage-access/about-access.md new file mode 100644 index 00000000000..844f49e6500 --- /dev/null +++ b/website/docs/docs/collaborate/manage-access/about-access.md @@ -0,0 +1,150 @@ +--- +title: "About access" +id: "about-access" +--- + +## Overview + +dbt Cloud administrators can use dbt Cloud's permissioning model to control +user-level access in a dbt Cloud account. This access control comes in two flavors: +License-based and Role-based. + +- **License-based Access Controls:** Users are configured with account-wide + license types. These licenses control the specific parts of the dbt Cloud application + that a given user can access. +- **Role-based Access Control (RBAC):** Users are assigned to _groups_ that have + specific permissions on specific projects or the entire account. A user may be + a member of multiple groups, and those groups may have permissions on multiple + projects. + +## License-based access control + +Each user on an account is assigned a license type when the user is first +invited to a given account. This license type may change over time, but a +user can only have one type of license at any given time. + +A user's license type controls the features in dbt Cloud that the user is able +to access. dbt Cloud's two license types are: + - **Read Only** + - **Developer** + +For more information on these license types, see [Seats & Users](cloud-seats-and-users).
+At a high level, Developers may be granted _any_ permissions, whereas Read Only +users will have read-only permissions applied to all dbt Cloud resources +regardless of the role-based permissions that the user is assigned. + +## Role-based access control + +:::info dbt Cloud Enterprise + +Role-based access control is a feature of the dbt Cloud Enterprise plan. + +::: + +Role-based access control allows for fine-grained permissioning in the dbt Cloud +application. With role-based access control, users can be assigned varying +permissions to different projects within a dbt Cloud account. For teams on the +Enterprise tier, role-based permissions can be generated dynamically from +configurations in an [Identity Provider](sso-overview). + +Role-based permissions are applied to _groups_ and pertain to _projects_. The +assignable permissions themselves are granted via _permission sets_. + + +### Groups + +A group is a collection of users. Users may belong to multiple groups. Members +of a group inherit any permissions applied to the group itself. + +Users can be added to a dbt Cloud group based on their group memberships in the +configured [Identity Provider](sso-overview) for the account. In this way, dbt +Cloud administrators can manage access to dbt Cloud resources via identity +management software like Azure AD, Okta, or GSuite. See _SSO Mappings_ below for +more information. + +You can view the groups in your account or create new groups from the **Team > Groups** +page in your Account Settings. + + + + +### SSO Mappings + +SSO Mappings connect Identity Provider (IdP) group membership to dbt Cloud group +membership. When a user logs into dbt Cloud via a supported identity provider, +their IdP group memberships are synced with dbt Cloud. Upon logging in +successfully, the user's group memberships (and therefore, permissions) are +adjusted accordingly within dbt Cloud automatically. + +:::tip Creating SSO Mappings + +While dbt Cloud supports mapping multiple IdP groups to a single dbt Cloud +group, we recommend using a 1:1 mapping to make administration as simple as +possible. Consider using the same name for your dbt Cloud groups and your IdP +groups. + +::: + + +### Permission Sets + +Permission sets are predefined collections of granular permissions. Permission +sets combine low-level permission grants into high-level roles that can be +assigned to groups. Some examples of existing permission sets are: + - Account Admin + - Git Admin + - Job Admin + - Job Viewer + - ...and more + +For a full list of enterprise permission sets, see [Enterprise Permissions](/docs/collaborate/manage-access/enterprise-permissions). +These permission sets are available for assignment to groups and control the ability +for users in these groups to take specific actions in the dbt Cloud application. + +In the following example, the _dbt Cloud Owners_ group is configured with the +**Account Admin** permission set on _All Projects_ and the **Job Admin** permission +set on the _Internal Analytics_ project. + + + + +### Manual assignment + + + +- New in version 1.1.23 (March 2021) + + + +dbt Cloud administrators can manually assign users to groups independently of +IdP attributes. If a dbt Cloud group is configured _without_ any +SSO Mappings, then the group will be _unmanaged_ and dbt Cloud will not adjust +group membership automatically when users log into dbt Cloud via an identity +provider.
This behavior may be desirable for teams that have connected an identity +provider, but have not yet configured SSO Mappings between dbt Cloud and the +IdP. + +If an SSO Mapping is added to an _unmanaged_ group, then it will become +_managed_, and dbt Cloud may automatically add users to or remove them from the group at +sign-in time based on the user's IdP-provided group membership information. + + +## FAQs +- **When are IdP group memberships updated for SSO Mapped groups?** Group memberships + are updated every time a user logs into dbt Cloud via a supported SSO provider. If + you've changed group memberships in your identity provider or dbt Cloud, ask your + users to log back into dbt Cloud for these group memberships to be synchronized. + +- **Can I set up SSO without RBAC?** Yes, see the documentation on + [Manual Assignment](#manual-assignment) above for more information on using + SSO without RBAC. + +- **Can I configure a user's License Type based on IdP Attributes?** Yes, see + the docs on [managing license types](/cloud-seats-and-users#managing-license-types) + for more information. diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/audit-log.md b/website/docs/docs/collaborate/manage-access/audit-log.md similarity index 87% rename from website/docs/docs/dbt-cloud/dbt-cloud-enterprise/audit-log.md rename to website/docs/docs/collaborate/manage-access/audit-log.md index 698b4f90071..78d59d9a0a2 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/audit-log.md +++ b/website/docs/docs/collaborate/manage-access/audit-log.md @@ -1,19 +1,22 @@ --- -title: "Using the audit log for dbt Cloud Enterprise" +title: "The audit log for dbt Cloud Enterprise" id: audit-log description: "You can troubleshoot possible issues and provide security audits by reviewing event activity in your organization." -sidebar_label: "Using the audit log" +sidebar_label: "Audit log" --- -To review actions performed by people in your organization, dbt provides logs of audited user and system events. The dbt Cloud audit log lists events triggered in your organization within the last 90 days. - -Use the audit log to quickly review the actions performed by members of your organization. The audit log includes details such as who performed the action, what the action was, and when it was performed. You can use these details to troubleshoot access issues, perform security audits, or analyze specific events. +To review actions performed by people in your organization, dbt provides logs of audited user and system events. You can use the audit log to quickly review the actions performed by members of your organization. The audit log includes details such as who performed the action, what the action was, and when it was performed. You can use these details to troubleshoot access issues, perform security audits, or analyze specific events. You must be an **Account Admin** to access the audit log, and this feature is only available on Enterprise plans. +The dbt Cloud audit log stores all the events that occurred in your organization: + +- For events within 90 days, the dbt Cloud audit log has a selectable date range that lists the events triggered in that period. +- For events beyond 90 days, **Account Admins** can [export all events](#exporting-logs) by using **Export All**. + ## Accessing the audit log -To access audit log, click **Account Settings** and **Audit Log**. +To access the audit log, click the gear icon in the top right, then click **Audit Log**.
@@ -166,10 +169,11 @@ You can search the audit log to find a specific event or actor, which is limited ## Exporting logs -You can use the audit log to export historical audit results for security, compliance, and analysis purposes. You can export data for up to the last 90 days. Click the **Export CSV** button to download a CSV file of all the events that occurred in your organization over the last 90 days. +You can use the audit log to export all historical audit results for security, compliance, and analysis purposes: -
+ +- For events within 90 days — dbt Cloud will automatically display a selectable 90-day date range. Select **Export Selection** to download a CSV file of all the events that occurred in your organization within those 90 days. +- For events beyond 90 days — select **Export All**. The Account Admin will receive an email link to download a CSV file of all the events that occurred in your organization. + + + - -
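Once downloaded, the export is a plain CSV, so you can analyze it with standard tooling. A minimal sketch using Python and pandas — the file name and column names here are hypothetical, so adjust them to match what your export actually contains:

```python
import pandas as pd

# Load the exported audit log (file name is hypothetical).
events = pd.read_csv("audit_log_export.csv")

# Example: count events per actor and event type to spot unusual activity.
# "actor" and "event_type" are assumed column names; check your export's header row.
summary = (
    events.groupby(["actor", "event_type"])
    .size()
    .sort_values(ascending=False)
)
print(summary.head(20))
```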
diff --git a/website/docs/docs/dbt-cloud/access-control/cloud-seats-and-users.md b/website/docs/docs/collaborate/manage-access/cloud-seats-and-users.md similarity index 88% rename from website/docs/docs/dbt-cloud/access-control/cloud-seats-and-users.md rename to website/docs/docs/collaborate/manage-access/cloud-seats-and-users.md index 55bad32f40b..d768e7c6e57 100644 --- a/website/docs/docs/dbt-cloud/access-control/cloud-seats-and-users.md +++ b/website/docs/docs/collaborate/manage-access/cloud-seats-and-users.md @@ -1,6 +1,6 @@ --- title: "Seats and Users" -id: "cloud-seats-and-users" +id: "seats-and-users" --- ## Overview @@ -10,7 +10,7 @@ different types of seat licenses in dbt Cloud: _Developer_ and _Read Only_. The type of license a user is assigned controls which capabilities of dbt Cloud the user is permitted to access. Users with a Developer license can be -granted access to the Deployment and [Development](the-dbt-ide) functionality +granted access to the Deployment and [Development](/docs/get-started/develop-in-the-cloud) functionality in dbt Cloud, whereas users with Read Only licenses are intended to view the [artifacts](docs/dbt-cloud/using-dbt-cloud/artifacts) created in a dbt Cloud account. @@ -20,8 +20,8 @@ in dbt Cloud, whereas users with Read Only licenses are intended to view the | Use Jobs | ✅ | ❌ | | Manage Account | ✅ | ❌ | | API Access | ✅ | ❌ | -| Use [Source Freshness](cloud-snapshotting-source-freshness) | ✅ | ✅ | -| Use [Docs](cloud-generating-documentation) | ✅ | ✅ | +| Use [Source Freshness](/docs/deploy/source-freshness) | ✅ | ✅ | +| Use [Docs](/docs/collaborate/build-and-view-your-docs) | ✅ | ✅ | ## Included seats @@ -37,7 +37,7 @@ dbt Cloud plan. | Plan | Developer Seats | Read Only Seats | | ---- | --------------- | --------------- | | Developer (free) | 1 | 0 | -| Team | $50/developer/mo | 50 ($0/mo) | +| Team | $50/developer/mo | 5 ($0/mo) | | Enterprise | Custom | Custom | ## Managing license types @@ -66,7 +66,7 @@ change. **Note:** This feature is only available on the Enterprise plan. If your account is connected to an Identity Provider (IdP) for [Single Sign -On](dbt-cloud-enterprise/sso-overview), you can automatically map IdP user +On](/docs/collaborate/manage-access/sso-overview), you can automatically map IdP user groups to specific license types in dbt Cloud. To configure license mappings, navigate to the Account Settings > Team > License Mappings page. From here, you can create or edit SSO mappings for both Read Only and Developer @@ -96,5 +96,5 @@ Usage notes: ## Granular permissioning The dbt Cloud Enterprise plan supports Role-Based access controls for -configuring granular in-app permissions. See [access control](access-control-overview) +configuring granular in-app permissions. See [access control](/docs/collaborate/manage-access/about-access) for more information on Enterprise permissioning. 
diff --git a/website/docs/docs/dbt-cloud/access-control/enterprise-permissions.md b/website/docs/docs/collaborate/manage-access/enterprise-permissions.md similarity index 65% rename from website/docs/docs/dbt-cloud/access-control/enterprise-permissions.md rename to website/docs/docs/collaborate/manage-access/enterprise-permissions.md index 1e81bd58d13..7a0031d3c7a 100644 --- a/website/docs/docs/dbt-cloud/access-control/enterprise-permissions.md +++ b/website/docs/docs/collaborate/manage-access/enterprise-permissions.md @@ -1,7 +1,7 @@ --- -title: "Enterprise Permissions" +title: "Enterprise permissions" id: "enterprise-permissions" -description: "Permission sets for Enterprise plans." +description: "Permission sets for Enterprise plans." --- :::info Enterprise Feature @@ -15,12 +15,14 @@ If you're interested in learning more about an Enterprise plan, contact us at sa The dbt Cloud Enterprise plan supports a number of pre-built permission sets to help manage access controls within a dbt Cloud account. See the docs on [access -control](access-control-overview) for more information on Role-Based access +control](/docs/collaborate/manage-access/about-access) for more information on Role-Based access control (RBAC). ## Permission Sets -The following permission sets are available for assignment in dbt Cloud Enterprise accounts. They can be granted to dbt Cloud groups, where users belong. A dbt Cloud group can be associated with more than one permission set. +The following permission sets are available for assignment in dbt Cloud Enterprise accounts. They +can be granted to dbt Cloud groups, whose members then inherit them. A dbt Cloud group +can be associated with more than one permission set. ### Account Admin @@ -36,18 +38,38 @@ Account Admins have unrestricted access to dbt Cloud accounts. Users with Accoun - Create, delete, and modify Jobs - Create, delete, and modify Groups - Create, delete, and modify Group Memberships -- Manage notification settings +- Manage Notification Settings - Manage account-level [artifacts](dbt-cloud/using-dbt-cloud/artifacts) - View and modify Account Settings - Use the IDE - Run and cancel jobs +### Project Creator +- **Has permissions on:** Authorized projects, account-level settings +- **License restrictions:** must have a developer license + +Project Creators have write and read-only access to dbt Cloud accounts, but do not have the permissions required to modify SSO settings and account integrations. + +Users with Project Creator permissions can: + +- View Account Settings +- View and modify project users +- Create, delete, and modify all projects in an account +- Create, delete, and modify Repositories +- Create, delete, and modify Connections +- Create, delete, and modify Environments +- Create, delete, and modify Jobs +- Use the IDE +- Run and cancel jobs +- View Groups +- View Notification Settings + ### Account Viewer - **Has permissions on:** Authorized projects, account-level settings - **License restrictions:** must have a developer license -Account Viewers have read only access to dbt Cloud accounts. Users with Account Viewer permissions can: +Account Viewers have read only access to dbt Cloud accounts.
Users with Account - View all projects in an account - View Account Settings - View Repositories @@ -56,7 +78,7 @@ Account Viewers have read only access to dbt Cloud accounts. - View Jobs - View Groups - View Group Memberships -- View notification settings +- View Notification Settings - View account-level artifacts ### Admin @@ -113,6 +135,7 @@ Team Admins can perform the following actions in projects they are assigned to: Job Admins can perform the following actions in projects they are assigned to: - View, edit, and create environments +- View connections - Trigger runs - View historical runs @@ -142,6 +165,7 @@ Developers can perform the following actions in projects they are assigned to: Analysts can perform the following actions in projects they are assigned to: - Use the IDE - Configure personal developer credentials +- View connections - View environments - View job definitions - View historical runs @@ -161,9 +185,21 @@ Stakeholders can perform the following actions in projects they are assigned to: ## How to Set Up RBAC Groups in dbt Cloud - + +Role-Based Access Control (RBAC) is helpful for automatically assigning permissions to dbt admins based on their SSO provider group associations. + - **If you are on a dbt Labs Hosted dbt Cloud instance:** -Contact support via the webapp button or support@getdbt.com to turn on this feature. +Contact support via the webapp button or support@getdbt.com to turn on this feature. +- **If you are on a customer deployed dbt Cloud instance:** +Contact your account manager for instructions on how to turn on this feature. + +Click the gear icon in the top right and select **Account Settings**. From the **Team** section, click **Groups**. + + - +1. Select an existing group or create a new group to add RBAC. Name the group (this can be any name you like, but it's recommended to keep it consistent with the SSO groups). If you have configured SSO with SAML 2.0, you may have to use the GroupID instead of the name of the group. +2. Configure the SSO provider groups you want to add RBAC to by clicking **Add** in the **SSO** section. These fields are case sensitive and must match the source group formatting. +3. Configure the permissions for users within those groups by clicking **Add** in the **Access** section of the window. + +4. When you've completed your configurations, click **Save**. Users will begin to populate the group automatically once they have signed in to dbt Cloud with their SSO credentials.
These permission sets are available for assignment to groups and control the ability for users in these groups to take specific actions in the dbt Cloud application. -In the following example, the _Analysts_ group is configured with the -**Analyst** permission set on _All Projects_ and the **Job Admin** permission -set on the _BQ test_ project. +In the following example, the _dbt Cloud Owners_ group is configured with the +**Account Admin** permission set on _All Projects_ and the **Job Admin** permission +set on the _Internal Analytics_ project. diff --git a/website/docs/docs/collaborate/manage-access/self-service-permissions.md b/website/docs/docs/collaborate/manage-access/self-service-permissions.md new file mode 100644 index 00000000000..0d6375452aa --- /dev/null +++ b/website/docs/docs/collaborate/manage-access/self-service-permissions.md @@ -0,0 +1,50 @@ +--- +title: "Self-service permissions" +id: "self-service-permissions" +--- +## Overview + +dbt Cloud supports two different permission sets to manage permissions for self-service accounts: **Member** and **Owner**. + +The permissions afforded to each role are described below: + +| Action | Member | Owner | +| ------ | ------ | ----- | +| View and edit resources | ✅ | ✅ | +| Trigger runs | ✅ | ✅ | +| Access the IDE | ✅ | ✅ | +| Invite Members to the account | ✅ | ✅ | +| Manage billing | ❌ | ✅ | +| Manage team permissions | ❌ | ✅ | +| Invite Owners to the account | ❌ | ✅ | + +## Read Only vs. Developer License Types + +Users configured with Read Only license types will experience a restricted set of permissions in dbt Cloud. If a user is associated with a _Member_ permission set and a Read Only seat license, then they will only have access to what a Read-Only seat allows. See [Seats and Users](cloud-seats-and-users) for more information on the impact of licenses on these permissions. + +## Owner and Member Groups in dbt Cloud Enterprise + +By default, new users are added to the Member and Owner groups when they onboard to a new dbt Cloud account. Member and Owner groups are included with every new dbt Cloud account because they provide access for administrators to add users and groups, and to apply permission sets. + +You will need owner and member groups to help with account onboarding, but these groups can create confusion when initially setting up SSO and RBAC for dbt Cloud Enterprise accounts as described in the [Enterprise Permissions](enterprise-permissions) guide. Owner and Member groups are **account level** groups, so their permissions override any project-level permissions you wish to apply. + +After onboarding administrative users and configuring RBAC/SSO groups, we recommend the following steps for onboarding users to a dbt Cloud Enterprise account. + +### Prerequisites + +You need to create an Account Admins group before removing any other groups. + +1. Create an Account Admins group. +2. Assign at least one user to the Account Admins group. The assigned user can manage future group, SSO mapping, and user or group assignment. + +### Remove the Owner and Member groups + +Follow these steps for both Owner and Member groups: + +1. Log into dbt Cloud. +2. Click the gear icon at the top right and select **Account settings**. +3. Select **Groups**, then select the **OWNER** or **MEMBER** group. +4. Click **Edit**. +5. At the bottom of the Group page, click **Delete**. + +The Account Admin can add additional SSO mapping groups, permission sets, and users as needed.
diff --git a/website/docs/docs/collaborate/manage-access/set-up-bigquery-oauth.md b/website/docs/docs/collaborate/manage-access/set-up-bigquery-oauth.md new file mode 100644 index 00000000000..e807431245a --- /dev/null +++ b/website/docs/docs/collaborate/manage-access/set-up-bigquery-oauth.md @@ -0,0 +1,75 @@ +--- +title: "Set up BigQuery OAuth" +id: "set-up-bigquery-oauth" +--- + +:::info Enterprise Feature + +This guide describes a feature of the dbt Cloud Enterprise plan. If you’re interested in learning more about an Enterprise plan, contact us at sales@getdbt.com. + +::: +### Overview + + +dbt Cloud supports developer [OAuth](https://cloud.google.com/bigquery/docs/authentication) with BigQuery, providing an additional layer of security for dbt enterprise users. When BigQuery OAuth is enabled for a dbt Cloud project, all dbt Cloud developers must authenticate with BigQuery in order to use the dbt Cloud IDE. The project's deployment environments will still leverage the BigQuery service account key set in the project credentials. + + +To set up BigQuery OAuth in dbt Cloud, a BigQuery admin must: +1. [Create a BigQuery OAuth 2.0 client ID and secret](#creating-a-bigquery-oauth-20-client-id-and-secret) in BigQuery. +2. [Configure the connection](#configure-the-connection-in-dbt-cloud) in dbt Cloud. + +To use BigQuery in the dbt Cloud IDE, all developers must: +1. [Authenticate to BigQuery](#authenticating-to-bigquery) in their profile credentials. + + +### Creating a BigQuery OAuth 2.0 client ID and secret +To get started, you need to create a client ID and secret for [authentication](https://cloud.google.com/bigquery/docs/authentication) with BigQuery. This client ID and secret will be stored in dbt Cloud to manage the OAuth connection between dbt Cloud users and BigQuery. + +In the BigQuery console, navigate to **APIs & Services** and select **Credentials**: + + + +On the **Credentials** page, you can see your existing keys, client IDs, and service accounts. + +Set up an [OAuth consent screen](https://support.google.com/cloud/answer/6158849) if you haven't already. Then, click **+ Create Credentials** at the top of the page and select **OAuth client ID**. + +Fill in the application details as follows: + +| Config | Value | +| ------ | ----- | +| **Application type** | Web application | +| **Name** | dbt Cloud | +| **Authorized Javascript origins** | https://cloud.getdbt.com | +| **Authorized Redirect URIs** | https://cloud.getdbt.com/complete/bigquery | + +If you're a dbt Cloud single tenant customer, you need to replace `cloud.getdbt.com` with the hostname of +your dbt Cloud instance. + +Then click **Create** to create the BigQuery OAuth app and see the app client ID and secret values. These values are available even if you close the app screen, so this isn't the only chance you have to save them. + + + + + +### Configure the Connection in dbt Cloud +Now that you have an OAuth app set up in BigQuery, you'll need to add the client ID and secret to dbt Cloud. To do so: + - go to [Settings](https://cloud.getdbt.com/next/settings/profile) + - on the left, select **Projects** under **Account Settings** + - choose your project from the list + - select **Connection** to edit the connection details + - add the client ID and secret from the BigQuery OAuth app under the **OAuth2.0 Settings** section + + + + +### Authenticating to BigQuery +Once the BigQuery OAuth app is set up for a dbt Cloud project, each dbt Cloud user will need to authenticate with BigQuery in order to use the IDE.
To do so: +- go to the [Credentials](https://cloud.getdbt.com/next/settings/profile#credentials) section +- choose your project from the list +- select **Authenticate BigQuery Account** + + +You will then be redirected to BigQuery and asked to approve the drive, cloud platform, and BigQuery scopes, unless the connection is less privileged. + + +Select **Allow**. This redirects you back to dbt Cloud. You should now be an authenticated BigQuery user, ready to use the dbt Cloud IDE. diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-snowflake-sso.md b/website/docs/docs/collaborate/manage-access/set-up-snowflake-oauth.md similarity index 75% rename from website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-snowflake-sso.md rename to website/docs/docs/collaborate/manage-access/set-up-snowflake-oauth.md index f4c3733f8d8..270ceadf6c8 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-snowflake-sso.md +++ b/website/docs/docs/collaborate/manage-access/set-up-snowflake-oauth.md @@ -1,6 +1,6 @@ --- -title: "Setting up Snowflake OAuth" -id: "setting-up-enterprise-snowflake-oauth" +title: "Set up Snowflake OAuth" +id: "set-up-snowflake-oauth" --- :::info Enterprise Feature @@ -41,7 +41,7 @@ CREATE OR REPLACE SECURITY INTEGRATION DBT_CLOUD | ENABLED | Required | | OAUTH_CLIENT | Required | | OAUTH_CLIENT_TYPE | Required | -| OAUTH_REDIRECT_URI | Required. If dbt Cloud is deployed on-premises, use the domain name of your application instead of `cloud.getdbt.com` | +| OAUTH_REDIRECT_URI | Required. Use the access URL that corresponds to your server [region](/docs/deploy/regions). If dbt Cloud is deployed on-premises, use the domain name of your application instead of the access URL. | | OAUTH_ISSUE_REFRESH_TOKENS | Required | | OAUTH_REFRESH_TOKEN_VALIDITY | Required. This configuration dictates the number of seconds that a refresh token is valid for. Use a smaller value to force users to re-authenticate with Snowflake more frequently. | @@ -52,7 +52,7 @@ Additional configuration options may be specified for the security integration a The Database Admin is responsible for creating a Snowflake Connection in dbt Cloud. This Connection is configured using a Snowflake Client ID and Client Secret. When configuring a Connection in dbt Cloud, select the "Allow SSO Login" checkbox. Once this checkbox is selected, you will be prompted to enter an OAuth Client ID and OAuth Client Secret. These values can be determined by running the following query in Snowflake: ``` -with +with integration_secrets as ( select parse_json(system$show_oauth_client_secrets('DBT_CLOUD')) as secrets @@ -61,13 +61,13 @@ integration_secrets as ( select secrets:"OAUTH_CLIENT_ID"::string as client_id, secrets:"OAUTH_CLIENT_SECRET"::string as client_secret -from +from integration_secrets; ``` -Enter the Client ID and Client Secret into dbt Cloud to complete the creation of your Connection. +Enter the Client ID and Client Secret into dbt Cloud to complete the creation of your Connection. - + ### Authorize Developer Credentials @@ -75,22 +75,27 @@ Once Snowflake SSO is enabled, users on the project will be able to configure th ### SSO OAuth Flow Diagram -![image](https://user-images.githubusercontent.com/46451573/84427818-841b3680-abf3-11ea-8faf-693d4a39cffb.png) + Once a user has authorized dbt Cloud with Snowflake via their identity provider, Snowflake will return a Refresh Token to the dbt Cloud application. 
dbt Cloud is then able to exchange this refresh token for an Access Token which can then be used to open a Snowflake connection and execute queries in the dbt Cloud IDE on behalf of users. + +**NOTE**: The lifetime of the refresh token is dictated by the OAUTH_REFRESH_TOKEN_VALIDITY parameter supplied in the “create security integration” statement. When a user’s refresh token expires, the user will need to re-authorize with Snowflake to continue development in dbt Cloud. + +### Setting up multiple dbt Cloud projects with Snowflake OAuth -If you are planning to set up the same Snowflake account to different dbt Cloud projects, you can use the same security integration for all of the projects. +If you are planning to connect the same Snowflake account to multiple dbt Cloud projects, you can use the same security integration for all of the projects. -### FAQs +### FAQs #### How do I use the Blocked Roles List with dbt Cloud? ### Troubleshooting #### Invalid consent request -When clicking on the `Connect Snowflake Account` successfully redirects you to the Snowflake login page, but you receive an `Invalid consent request` error, your Snowflake user may not have access to the Snowflake role defined on the development credentials in dbt Cloud. Double-check that you have access to that role and if the role name has been correctly entered in as Snowflake is case sensitive. +If clicking `Connect Snowflake Account` successfully redirects you to the Snowflake login page but you then receive an `Invalid consent request` error, this could mean: +* Your user might not have access to the Snowflake role defined on the development credentials in dbt Cloud. Double-check that you have access to that role and that the role name is entered correctly, as Snowflake is case sensitive. +* You're trying to use a role that is in the [BLOCKED_ROLES_LIST](https://docs.snowflake.com/en/user-guide/oauth-partner.html#blocking-specific-roles-from-using-the-integration), such as `ACCOUNTADMIN`. +#### Server error 500 +If you experience a 500 server error when redirected from Snowflake to dbt Cloud, double-check that you have whitelisted [dbt Cloud's IP addresses](/docs/deploy/regions) on a Snowflake account level. +Enterprise customers who have single-tenant deployments will have a different range of IP addresses (network CIDR ranges) to whitelist.
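If you need to confirm which roles your integration blocks (relevant to the `Invalid consent request` notes above), you can inspect the security integration directly. A minimal sketch using the `snowflake-connector-python` package — the account and credentials are placeholders:

```python
import snowflake.connector

# Placeholder connection details -- substitute your own account and credentials.
conn = snowflake.connector.connect(
    account="<account_identifier>",
    user="<admin_user>",
    password="<password>",
    role="ACCOUNTADMIN",  # a role allowed to describe security integrations
)

cur = conn.cursor()
cur.execute("DESCRIBE SECURITY INTEGRATION DBT_CLOUD")

# Rows are (property, property_type, property_value, property_default);
# BLOCKED_ROLES_LIST shows which roles cannot use the integration.
for prop, _ptype, value, _default in cur.fetchall():
    if prop == "BLOCKED_ROLES_LIST":
        print("Blocked roles:", value)

cur.close()
conn.close()
```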
diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory.md b/website/docs/docs/collaborate/manage-access/set-up-sso-azure-active-directory.md similarity index 84% rename from website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory.md rename to website/docs/docs/collaborate/manage-access/set-up-sso-azure-active-directory.md index cb5f0b29fa2..736eba16850 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory.md +++ b/website/docs/docs/collaborate/manage-access/set-up-sso-azure-active-directory.md @@ -1,6 +1,7 @@ --- -title: "Setting up SSO with Azure AD" -id: "setting-up-enterprise-sso-with-azure-active-directory" +title: "Set up SSO with Azure Active Directory" +id: "set-up-sso-azure-active-directory" +sidebar_label: "Set up SSO with Azure AD" --- :::info Enterprise Feature @@ -19,7 +20,7 @@ Currently supported features include: ## Configuration -dbt Cloud supports both single tenant and multitenant Azure Active Directory SSO +dbt Cloud supports both single tenant and multi-tenant Azure Active Directory SSO Connections. For most Enterprise purposes, you will want to use the single tenant flow when creating an Azure AD Application. @@ -42,17 +43,14 @@ need to select the appropriate directory and then register a new application. Redirect URI values for single-tenant and multi-tenant deployments. For most enterprise use-cases, you will want to use the single-tenant Redirect URI. -:::note VPC Deployment -If you are deploying dbt Cloud into a VPC, you should use the hostname where -the dbt Cloud application is deployed instead of `https://cloud.getdbt.com` in -the **Redirect URI** input. -::: | Application Type | Redirect URI | | ----- | ----- | | Single-Tenant _(recommended)_ | `https://cloud.getdbt.com/complete/azure_single_tenant` | | Multi-Tenant | `https://cloud.getdbt.com/complete/azure_multi_tenant` | +*Note:* If your dbt account instance is a VPC deployment or is based [outside the US](/docs/deploy/regions), your login URL will use the domain supplied to you by your dbt Labs account team, instead of the domain `cloud.getdbt.com`. + 5. Save the App registration to continue setting up Azure AD SSO @@ -76,7 +74,7 @@ the Redirect URI in the steps above, then skip ahead to step 8. -### Azure <-> dbt Cloud User and Group mapping +### Azure <-> dbt Cloud User and Group mapping The Azure users and groups you will create in the following steps are mapped to groups created in dbt Cloud based on the group name. Reference the docs on [enterprise permissions](enterprise-permissions) for additional information on how users, groups, and permission sets are configured in dbt Cloud. @@ -94,7 +92,7 @@ Once you've registered the application, the next step is to assign users to it. :::info User assignment required? Under **Properties** check the toggle setting for **User assignment required?** and confirm it aligns to your requirements. Most customers will want this toggled to **Yes** so that only users/groups explicitly assigned to dbt Cloud will be able to sign in. 
If this setting is toggled to **No** any user will be able to access the application if they have a direct link to the application per [Azure AD Documentation](https://docs.microsoft.com/en-us/azure/active-directory/manage-apps/assign-user-or-group-access-portal#configure-an-application-to-require-user-assignment) -::: +::: ### Configuring permissions @@ -139,17 +137,9 @@ Under **Properties** check the toggle setting for **User assignment required?** To complete setup, follow the steps below in the dbt Cloud application. -### Enable Azure AD Native Auth (beta) - -- For users accessing dbt Cloud at cloud.getdbt.com, contact your account manager to - gain access to the Azure AD Native auth configuration UI -- For users accessing dbt Cloud deployed in a VPC, enable the `native_azure` - feature flag in the dbt Cloud admin backend. - ### Supplying credentials -24. Navigate to the **Enterprise > Single Sign On** page under Account -Settings. +24. Go to [Settings](https://cloud.getdbt.com/next/settings/profile). On the left side, select **Single Sign On** under **Account Settings**. 25. Click the **Edit** button and supply the following SSO details: | Field | Value | @@ -162,10 +152,10 @@ Settings. | **Slug** | Enter your desired login slug. Users will be able to log into dbt Cloud by navigating to `https://cloud.getdbt.com/enterprise-login/`. Login slugs must be unique across all dbt Cloud accounts, so pick a slug that uniquely identifies your company. | - + 26. Click **Save** to complete setup for the Azure AD SSO integration. From - here, you can navigate to the URL generated for your account's _slug_ to + here, you can navigate to the login URL generated for your account's _slug_ to test logging in with Azure AD. :::success Logging in @@ -175,16 +165,15 @@ by navigating to the URL: `https://cloud.getdbt.com/enterprise-login/` ::: -*Note:* If your dbt account is configured with a single-tenant cloud or on-premise installation, the `cloud.getdbt.com` domain in the URL above will be replaced with a different value. +*Note:* If your dbt account instance is a VPC deployment or is [based outside the US](/docs/deploy/regions), your login URL will use the domain supplied to you by your dbt Labs account team, instead of the domain `cloud.getdbt.com`. ## Setting up RBAC Now you have completed setting up SSO with Azure AD, the next steps will be to set up -[RBAC groups](/docs/dbt-cloud/access-control/enterprise-permissions) to complete your access control configuration. +[RBAC groups](/docs/collaborate/manage-access/enterprise-permissions) to complete your access control configuration. ## Troubleshooting Tips -Ensure that the domain name under which user accounts exist in Azure matches the domain supplied in the SSO configuration on the dbt side. +Ensure that the domain name under which user accounts exist in Azure matches the domain you supplied in [Supplying credentials](#supplying-credentials) when you configured SSO. 
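As an alternative to clicking through the portal, the app registration described above can be scripted against the Microsoft Graph applications endpoint. A minimal sketch in Python — it assumes you have already obtained a Graph access token with `Application.ReadWrite.All` permission through your own auth flow:

```python
import requests

GRAPH_TOKEN = "<graph-access-token>"  # placeholder; acquire via your own Azure AD auth flow

# Register a single-tenant web app with the dbt Cloud redirect URI from the table above.
app = {
    "displayName": "dbt Cloud",
    "signInAudience": "AzureADMyOrg",  # single-tenant
    "web": {"redirectUris": ["https://cloud.getdbt.com/complete/azure_single_tenant"]},
}

resp = requests.post(
    "https://graph.microsoft.com/v1.0/applications",
    headers={"Authorization": f"Bearer {GRAPH_TOKEN}"},
    json=app,
)
resp.raise_for_status()
print("Application (client) ID:", resp.json()["appId"])
```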
- diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite.md b/website/docs/docs/collaborate/manage-access/set-up-sso-google-workspace.md similarity index 87% rename from website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite.md rename to website/docs/docs/collaborate/manage-access/set-up-sso-google-workspace.md index 6259d28b737..3aba99f2bd0 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite.md +++ b/website/docs/docs/collaborate/manage-access/set-up-sso-google-workspace.md @@ -1,6 +1,6 @@ --- -title: "Setting up SSO with GSuite" -id: "setting-up-sso-with-google-gsuite" +title: "Set up SSO with Google Workspace" +id: "set-up-sso-google-workspace" --- :::info Enterprise Feature @@ -13,10 +13,10 @@ dbt Cloud Enterprise supports Single-Sign On (SSO) via Google GSuite. You will n permissions to create and manage a new Google OAuth2 application, as well as access to enable the Google Admin SDK. Gsuite is a component within Google Cloud Platform (GCP), so you will also need access to a login with permissions -to manage the GSuite application within a GCP account. +to manage the GSuite application within a GCP account. Some customers choose to use different cloud providers for User and Group permission setup -than for hosting infrastructure. For example, it's certainly possible to use GSuite to +than for hosting infrastructure. For example, it's certainly possible to use GSuite to manage login information and Multifactor Authentication (MFA) configuration while hosting data workloads on AWS. @@ -90,13 +90,6 @@ and ensure that the API is enabled. To complete setup, follow the steps below in the dbt Cloud application. -### Enable GSuite Native Auth (beta) - -- For users accessing dbt Cloud at cloud.getdbt.com, contact your account manager to - gain access to the GSuite Native auth configuration UI -- For users accessing dbt Cloud deployed in a VPC, enable the `native_gsuite` - feature flag in the dbt Cloud admin backend. - ### Supply your OAuth Client ID and Client Secret 1. Navigate to the **Enterprise > Single Sign On** page under Account @@ -105,10 +98,10 @@ Settings. - **Log in with**: GSuite - **Client ID**: Paste the Client ID generated in the steps above - **Client Secret**: Paste the Client Secret generated in the steps above - - **Domain in GSuite**: Enter the domain name for your GSuite account (eg. `fishtownanalytics.com`). + - **Domain in GSuite**: Enter the domain name for your GSuite account (eg. `dbtlabs.com`). Only users with an email address from this domain will be able to log into your dbt Cloud account using GSuite auth. Optionally, you may specify a CSV of domains - which are _all_ authorized to access your dbt Cloud account (eg. `fishtownanalytics.com, fishtowndata.com`) + which are _all_ authorized to access your dbt Cloud account (eg. `dbtlabs.com, fishtowndata.com`) - **Slug**: Enter your desired login slug. Users will be able to log into dbt Cloud by navigating to `https://cloud.getdbt.com/enterprise-login/`. Login slugs must be unique across all dbt Cloud accounts, so pick a slug that uniquely @@ -118,11 +111,12 @@ Settings. dropped into the GSuite OAuth flow and prompted to log into dbt Cloud with your work email address. If authentication is successful, you will be redirected back to the dbt Cloud application. -4. On the **Verify SSO Credentials** page, verify that a `groups` entry is +4. 
On the **Credentials** page, verify that a `groups` entry is present, and that it reflects the groups you are a member of in GSuite. If - you do not see a `groups` entry in the IdP attribute list, consult the - Troubleshooting steps below. - + you do not see a `groups` entry in the IdP attribute list, consult the following + Troubleshooting steps. + + If the verification information looks appropriate, then you have completed the configuration of GSuite SSO. Members of your team should now be able to log @@ -130,11 +124,17 @@ into the dbt Cloud application at `https://cloud.getdbt.com/enterprise-login/` + +Additionally, you may configure the IdP attributes passed from your identity provider into dbt Cloud. We recommend using the following values: + + +| name | name format | value | description | +| ---- | ----------- | ----- | ----------- | +| email | Unspecified | ${user.email} | The user's email address | +| first_name | Unspecified | ${user.first_name} | The user's first name | +| last_name | Unspecified | ${user.last_name} | The user's last name | + +dbt Cloud's [role-based access control](/docs/collaborate/manage-access/about-access#role-based-access-control) relies +on group mappings from the IdP to assign dbt Cloud users to dbt Cloud groups. To +use role-based access control in dbt Cloud, also configure your identity +provider to provide group membership information in user attribute called +`groups`: + +| name | name format | value | description | +| ---- | ----------- | ----- | ----------- | +| groups | Unspecified | `` | The groups a user belongs to in the IdP | + +:::info Note +You may use a restricted group attribute statement to limit the groups set +to dbt Cloud for each authenticated user. For example, if all of your dbt Cloud groups start +with `DBT_CLOUD_...`, you may optionally apply a filter like `Starts With: DBT_CLOUD_`. +Please contact support if you have any questions. +::: + +### Collect integration secrets + +After confirming your details, the IdP should show you the following values for +the new SAML 2.0 integration. Keep these values somewhere safe, as you will need +them to complete setup in dbt Cloud. + +- Identity Provider Issuer +- Identity Provider SSO Url +- X.509 Certificate + +### Finish setup + +After creating the Okta application, follow the instructions in the [dbt Cloud Setup](#dbt-cloud-setup) +section to complete the integration. + +## Okta integration +You can use the instructions in this section to configure Okta as your identity provider. + +1. Log into your Okta account. Using the Admin dashboard, create a new app. + + + +2. Select the following configurations: + - **Platform**: Web + - **Sign on method**: SAML 2.0 + +3. Click **Create** to continue the setup process. + + + +### Configure the Okta application + +1. On the **General Settings** page, enter the following details: + + * **App name**: dbt Cloud + * **App logo** (optional): You can optionally [download the dbt logo](https://drive.google.com/file/d/1fnsWHRu2a_UkJBJgkZtqt99x5bSyf3Aw/view?usp=sharing), + and upload it to Okta to use as the logo for this app. + +2. Click **Next** to continue. + + + +### Configure SAML Settings + +1. On the **SAML Settings** page, enter the following values: + + * **Single sign on URL**: `https://cloud.getdbt.com/complete/okta` + * **Audience URI (SP Entity ID)**: `https://cloud.getdbt.com/` + * **Relay State**: `` + + + +2. 
Map your organization's Okta User and Group Attributes to the format that +dbt Cloud expects by using the Attribute Statements and Group Attribute Statements forms. + +3. The following table illustrates expected User Attribute Statements: + + | Name | Name format | Value | Description | + | -------------- | ----------- | -------------------- | -------------------------- | + | `email` | Unspecified | `${user.email}` | _The user's email address_ | + | `first_name` | Unspecified | `${user.firstName}` | _The user's first name_ | + | `last_name` | Unspecified | `${user.lastName}` | _The user's last name_ | + +4. The following table illustrates expected **Group Attribute Statements**: + + | Name | Name format | Filter | Value | Description | + | -------- | ----------- | ------------- | ----- | ------------------------------------- | + | `groups` | Unspecified | Matches regex | `.*` | _The groups that the user belongs to_ | + + +You can instead use a more restrictive Group Attribute Statement than the +example shown in the previous steps. For example, if all of your dbt Cloud groups start with +`DBT_CLOUD_`, you may use a filter like `Starts With: DBT_CLOUD_`. **Okta +only returns 100 groups for each user, so if your users belong to more than 100 +IdP groups, you will need to use a more restrictive filter**. Please contact +support if you have any questions. + + + +5. Click **Next** to continue. + +### Finish Okta setup + +1. Select *I'm an Okta customer adding an internal app*. +2. Select *This is an internal app that we have created*. +3. Click **Finish** to finish setting up the +app. + + + +### View setup instructions + +1. On the next page, click **View Setup Instructions**. +2. In the steps below, you'll supply these values in your dbt Cloud Account Settings to complete +the integration between Okta and dbt Cloud. + + + + + +3. After creating the Okta application, follow the instructions in the [dbt Cloud Setup](#dbt-cloud-setup) +section to complete the integration. + +## Google integration + +Use this section if you are configuring Google as your identity provider. + +### Configure the Google application + +1. Sign into your **Google Admin Console** via an account with super administrator privileges. +2. From the Admin console Home page, go to **Apps** and then click **Web and mobile apps**. +3. Click **Add**, then click **Add custom SAML app**. +4. Click **Next** to continue. +5. Make these changes on the App Details page: + - Name the custom app + - Upload an app logo (optional) + - Click **Continue**. + +### Configure SAML Settings + +1. Go to the **Google Identity Provider details** page. +2. Download the **IDP metadata**. +3. Copy the **SSO URL** and **Entity ID** and download the **Certificate** (or **SHA-256 fingerprint**, if needed). +4. Enter the following values on the **Service Provider Details** window: + - **ACS URL**: `https://cloud.getdbt.com/complete/saml` + - **Audience URI (SP Entity ID)**: `https://cloud.getdbt.com/` + - **Start URL**: (if needed) +5. The default **Name ID** is the primary email. Multi-value input is not supported. +6. Use the **Attribute mapping** page to map your organization's Google Directory Attributes to the format that +dbt Cloud expects. +7. Click **Add another mapping** to map additional attributes. + +Expected **Attributes**: + +| Name | Name format | Value | Description | +| -------------- | ----------- | -------------------- | -------------------------- | +| `First name` | Unspecified | `first_name` | The user's first name. 
| +| `Last name` | Unspecified | `last_name` | The user's last name. | +| `Primary email`| Unspecified | `email` | The user's email address. | + +8. Click **Finish** to continue. + + +### Finish Google setup + +1. From the Admin console Home page, go to **Apps** and then click **Web and mobile apps**. +2. Select your SAML app. +3. Click **User access**. +4. To turn on or off a service for everyone in your organization, click **On for everyone** or **Off for everyone**, and then click **Save**. +5. Ensure that the email addresses your users use to sign in to the SAML app match the email addresses they use to sign in to your Google domain. (Changes typically take effect in minutes but can take up to 24 hours.) + +### Finish setup + +After creating the Google application, follow the instructions in the [dbt Cloud Setup](#dbt-cloud-setup) section to complete the integration. + +## Azure integration + +If you're using Azure Active Directory (Azure AD), the instructions below will help you configure it as your identity provider. + +### Create Azure AD Enterprise application + +Follow these steps to set up single sign-on (SSO) with dbt Cloud: + +1. Log into your Azure account. +2. In the Azure AD portal, select **Enterprise applications** and click **+ New application**. +3. Select **Create your own application**. +4. Name the application "dbt Cloud" or another descriptive name. +5. Select **Integrate any other application you don't find in the gallery (Non-gallery)** as the application type. +6. Click **Create**. +7. You can find the new application by clicking **Enterprise applications** and selecting **All applications**. +8. Click the application you just created and follow the instructions for configuring it in [Configuring SAML endpoints in AD](#configuring-saml-endpoints-in-ad). +9. Select **Single sign-on** under Manage in the left navigation. +10. Click **Set up single sign on** under Getting Started. +11. Click **SAML** in the "Select a single sign-on method" section. +12. Click **Edit** in the Basic SAML Configuration section. +13. Use the following table to complete the required fields and connect to dbt: + + | Field | Value | + | ----- | ----- | + | **Identifier (Entity ID)** | Base URL for dbt Cloud. Use `https://cloud.getdbt.com/` or `https://YOUR_COMPANY.getdbt.com/` for a single tenant instance. | + | **Reply URL (Assertion Consumer Service URL)** | Use `https://cloud.getdbt.com/complete/saml` or `https://YOUR_COMPANY.getdbt.com/complete/saml` for a single tenant instance. | + | **Relay State** | The slug you will configure in dbt Cloud. It's usually your company name, but you can pick anything you'd like. | +14. Click **Save** at the top of the form. + +#### Creating SAML Attributes in AD + +From the Set up Single Sign-On with SAML page: + +1. Click **Edit** in the User Attributes & Claims section. +2. Leave the claim under "Required claim" as is. +3. Delete all claims under "Additional claims." +4. Click **Add new claim** and add these three new claims: + + | Name | Source attribute | + | ----- | ----- | + | **email** | user.mail | + | **first_name** | user.givenname | + | **last_name** | user.surname | + +5. Click **Add a group claim** from User Attributes and Claims. +6. If you'll assign users directly to the enterprise application, select **Security Groups**. If not, select **Groups assigned to the application**. +7. Set **Source attribute** to **Group ID**. +8. Under **Advanced options**, check **Customize the name of the group claim** and set **Name** to **groups**.
+ +**Note:** Keep in mind that the Group ID in Azure AD maps to that group's GUID. It should be specified in lowercase for the mappings to work as expected. The **Source attribute** field can alternatively be set to a different value of your preference. + + +9. After creating the Azure application, follow the instructions in the [dbt Cloud Setup](#dbt-cloud-setup) section to complete the integration. + + +## OneLogin integration + +Use this section if you are configuring OneLogin as your identity provider. + +To configure OneLogin, you will need **Administrator** access. + +### Configure the OneLogin application + +1. Log into OneLogin, and add a new SAML 2.0 Application. +2. Configure the application with the following details: + - **Platform:** Web + - **Sign on method:** SAML 2.0 + - **App name:** dbt Cloud + - **App logo (optional):** You can optionally [download the dbt logo](https://drive.google.com/file/d/1fnsWHRu2a_UkJBJgkZtqt99x5bSyf3Aw/view?usp=sharing), and use it as the logo for this app. + +### Configure SAML settings + +The following steps assume your dbt Cloud deployment instance uses `https://cloud.getdbt.com`. If your [deployment](/docs/deploy/regions) uses a different URL, replace `cloud.getdbt.com` with the URL of your instance. + +3. To complete this section, you will need to create a login slug. This slug controls the URL where users on your account +can log into your application. Login slugs are typically the lowercased name of your organization +separated with dashes. For example, the login slug for dbt Labs would be `dbt-labs`. +Login slugs must be unique across all dbt Cloud accounts, so pick a slug that uniquely identifies your company.

+✅ Use: `your-company-name`
+❌ Avoid: `Your-Company-Name` +4. Under the **Configuration tab**, input the following values: + + - **RelayState:** `` + - **Audience (EntityID):** https://cloud.getdbt.com/ + - **ACS (Consumer) URL Validator:** https://cloud.getdbt.com/complete/saml + - **ACS (Consumer) URL:** https://cloud.getdbt.com/complete/saml + +5. Next, go to the **Parameters tab**. You must have a parameter for the Email, First Name, and Last Name attributes and include all parameters in the SAML assertions. When you add the custom parameters, make sure you check the **Include in SAML assertion** checkbox. + +We recommend using the following values: + +| name | name format | value | +| ---- | ----------- | ----- | +| email | Unspecified | Email | +| first_name | Unspecified | First Name | +| last_name | Unspecified | Last Name | + +dbt Cloud's [role-based access control](/docs/collaborate/manage-access/about-access#role-based-access-control) relies +on group mappings from the IdP to assign dbt Cloud users to dbt Cloud groups. To +use role-based access control in dbt Cloud, also configure OneLogin to provide group membership information in a user attribute called +`groups`: + +| name | name format | value | description | +| ---- | ----------- | ----- | ----------- | +| groups | Unspecified | Series of groups to be used for your organization | The groups a user belongs to in the IdP | + + +### Collect integration secrets + +6. After confirming your details, go to the **SSO tab**. OneLogin should show you the following values for +the new integration. Keep these values somewhere safe, as you will need them to complete setup in dbt Cloud. + +- Issuer URL +- SAML 2.0 Endpoint (HTTP) +- X.509 Certificate + +### Finish setup + +7. After creating the OneLogin application, follow the instructions in the [dbt Cloud Setup](#dbt-cloud-setup) +section to complete the integration. + +## dbt Cloud Setup + +### Providing IdP values to dbt Cloud + +To complete setup, follow the steps below in dbt Cloud: + +1. Navigate to the **Account Settings** and then click on **Single Sign On**. +2. Click **Edit** in the upper right corner. +3. Provide the following SSO details: + + | Field | Value | + | ----- | ----- | + | Log in with | SAML 2.0 | + | Identity Provider SSO Url | Paste the **Identity Provider Single Sign-On URL** shown in the IdP setup instructions | + | Identity Provider Issuer | Paste the **Identity Provider Issuer** shown in the IdP setup instructions | + | X.509 Certificate | Paste the **X.509 Certificate** shown in the IdP setup instructions | + | Slug | Enter your desired login slug. | + + +4. Click **Save** to complete setup for the SAML 2.0 integration. +5. After completing the setup, you can navigate to the URL generated for your account's _slug_ to +test logging in with your identity provider. Additionally, users added to the SAML 2.0 app +will be able to log in to dbt Cloud from the IdP directly. + +Users in your IdP will now be able to log into the application by navigating to the URL: + +`https://cloud.getdbt.com/enterprise-login/` + +### Setting up RBAC + +After configuring an identity provider, you will be able to set up [role-based +access control](/docs/collaborate/manage-access/enterprise-permissions) for your account.
diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/sso-overview.md b/website/docs/docs/collaborate/manage-access/sso-overview.md similarity index 51% rename from website/docs/docs/dbt-cloud/dbt-cloud-enterprise/sso-overview.md rename to website/docs/docs/collaborate/manage-access/sso-overview.md index f47323fa886..06fd77d48fe 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/sso-overview.md +++ b/website/docs/docs/collaborate/manage-access/sso-overview.md @@ -6,13 +6,13 @@ id: "sso-overview" :::info Enterprise Feature -This guide describes a feature of the dbt Cloud Enterprise plan. +This guide describes a feature of the dbt Cloud Enterprise plan. If you’re interested in learning more about an Enterprise plan, contact us at sales@getdbt.com. ::: -This overview explains how users are provisioned in dbt Cloud via Single Sign-On (SSO). -dbt Cloud supports JIT (Just-in-Time) provisioning and IdP-initiated login. You can learn more about our supported options [here](/docs/dbt-cloud/dbt-cloud-enterprise). +This overview explains how users are provisioned in dbt Cloud via Single Sign-On (SSO). +dbt Cloud supports JIT (Just-in-Time) provisioning and IdP-initiated login. You can learn more about our supported options [here](https://www.getdbt.com/pricing/). The diagram below explains the basic process by which users are provisioned in dbt Cloud upon logging in with SSO. @@ -33,7 +33,38 @@ The diagram below explains the basic process by which users are provisioned in d - **Attach Matching Accounts**: dbt Cloud finds all of the accounts configured to match the SSO config used by this user to log in, and then creates a user license record mapping the user to the account. This step will also delete any licenses that the user should not have based on the current SSO config. - **Attach Matching Permissions (Groups)**: dbt Cloud iterates through the groups on the matching accounts, and finds all that fit one of the categories below: - have an SSO mapping group that is assigned to the user - - have the "Assign by Default" option checked. + - have the "Assign by Default" option checked. Then, assign all of these (and only these) to the user license. This step will also remove any permissions that the user should not have based on the current SSO group mappings. - **dbt Cloud Application**: After these steps, the user is redirected into the dbt Cloud application, and they can begin to use the application normally. +## SSO Enforcement + +:::info Security Update + +Please read the following update if you've enabled SSO but still have non-admin users logging in with a password. The changes outlined here will be released after September 15, 2022. + +::: + +Starting September 15, 2022, we will be making these security changes to SSO to increase the security posture of your environment: + +* **SSO Enforcement:** If you have SSO turned on in your organization, dbt Cloud will enforce SSO-only logins for all non-admin users. If an Account Admin already has a password, they can continue logging in with a password. +* **SSO Re-Authentication:** dbt Cloud will prompt you to re-authenticate using your SSO provider every 24 hours to ensure high security. + +### How should non-admin users log in? + +Non-admin users that currently log in with a password will no longer be able to do so. They must log in using the dbt Enterprise Login URL or an identity provider (IdP) such as Okta or Azure AD.
+ +### Security best practices + +There are a few scenarios that might require you to log in with a password. We recommend these security best practices for the two most common scenarios: +* **Onboarding partners and contractors** - We highly recommend that you add partners and contractors to your Identity Provider. IdPs like Okta and Azure Active Directory (AAD) offer capabilities explicitly for temporary employees. We highly recommend that you reach out to your IT team to provision an SSO license for these situations. Using an IdP is highly secure, reduces breach risk, and significantly increases the security posture of your dbt Cloud environment. +* **Identity Provider is down -** Account admins will continue to be able to log in with a password, which allows them to work with your Identity Provider to troubleshoot the problem. + +### Next steps for non-admin users currently logging in with passwords + +If you have any non-admin users logging into dbt Cloud with a password today: + +1. Ensure that all users have a user account in your identity provider and are assigned to dbt Cloud so they won’t lose access. +2. Alert all dbt Cloud users that they won’t be able to use a password for logging in anymore unless they are already an Admin with a password. +3. We **DO NOT** recommend promoting any users to Admins just to preserve password-based logins because you will reduce the security of your dbt Cloud environment. diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/admin-cloud-api.md b/website/docs/docs/dbt-cloud-apis/admin-cloud-api.md similarity index 77% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/admin-cloud-api.md rename to website/docs/docs/dbt-cloud-apis/admin-cloud-api.md index e677299eabb..499d2e904c7 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/admin-cloud-api.md +++ b/website/docs/docs/dbt-cloud-apis/admin-cloud-api.md @@ -11,5 +11,3 @@ The dbt Cloud Administrative API is enabled by default for _Team_ and _Enterpris - and more Reference documentation for the dbt Cloud Administrative v2 API can be found [here](/dbt-cloud/api-v2). - -Reference documentation for the under construction dbt Cloud Administrative v4 API can be found [here](/dbt-cloud/api-v4). diff --git a/website/docs/docs/dbt-cloud-apis/apis-overview.md b/website/docs/docs/dbt-cloud-apis/apis-overview.md new file mode 100644 index 00000000000..fc8e7c76009 --- /dev/null +++ b/website/docs/docs/dbt-cloud-apis/apis-overview.md @@ -0,0 +1,18 @@ +--- +title: "APIs Overview" +id: "overview" +--- + +## Overview + +Accounts on the _Team_ and _Enterprise_ plans can query the dbt Cloud APIs. + +dbt Cloud provides two APIs. + +The [dbt Cloud Administrative API](/docs/dbt-cloud-apis/admin-cloud-api) can be used to administer a dbt Cloud account. + +The [dbt Metadata API](/docs/dbt-cloud-apis/metadata-api) can be used to fetch metadata related to the state and health of your dbt project. + +## How to Access the APIs + +dbt Cloud supports two types of API Tokens: [user tokens](/docs/dbt-cloud-apis/user-tokens) and [service account tokens](/docs/dbt-cloud-apis/service-tokens). Requests to the dbt Cloud APIs can be authorized using these tokens.
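+To illustrate, below is a minimal sketch of authorizing a request to each API with Python's `requests` library. It assumes a multi-tenant instance at `cloud.getdbt.com`, the standard `/api/v2/accounts/` endpoint, and placeholder token and job ID values; adapt it to your own instance and IDs.
+
+```python
+import requests
+
+API_TOKEN = "<your-token>"  # placeholder: a user token or service account token
+HEADERS = {"Authorization": f"Token {API_TOKEN}"}
+
+# Administrative API: a REST-style call listing the accounts this token can access.
+accounts = requests.get(
+    "https://cloud.getdbt.com/api/v2/accounts/",
+    headers=HEADERS,
+)
+accounts.raise_for_status()
+print(accounts.json())
+
+# Metadata API: a GraphQL query sent as a JSON request body (jobId 123 is a placeholder).
+query = "{ models(jobId: 123) { uniqueId executionTime } }"
+metadata = requests.post(
+    "https://metadata.cloud.getdbt.com/graphql",
+    headers=HEADERS,
+    json={"query": query},
+)
+metadata.raise_for_status()
+print(metadata.json())
+```
+
+Both requests send the same `Authorization: Token` header; only the endpoint and the shape of the request body differ between the two APIs.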
diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview.md b/website/docs/docs/dbt-cloud-apis/metadata-api.md similarity index 71% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview.md rename to website/docs/docs/dbt-cloud-apis/metadata-api.md index db62e5e0be8..169afbb225f 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview.md +++ b/website/docs/docs/dbt-cloud-apis/metadata-api.md @@ -1,12 +1,22 @@ --- -title: "Metadata API Overview" -id: "metadata-overview" +title: "Metadata API" +id: "metadata-api" --- +## About the metadata API + Every time that dbt Cloud runs a dbt project, it generates metadata which pertains to the accuracy, recency, configuration, and structure of the views and tables in the warehouse. dbt Cloud serves a GraphQL API which supports arbitrary queries over this metadata; the endpoint for this API is `https://metadata.cloud.getdbt.com/graphql`. This API is an incredibly rich resource for evaluating data health longitudinally or at a point in time. +## Prerequisites + +The metadata API is available to accounts on the _Team_ and _Enterprise_ plans, for any version >= dbt v0.19.0. Please note that artifacts generated with a version of dbt _less than_ v0.19.0 will not be accessible via the dbt Cloud metadata API. For information on upgrading, see "[Version migration guides](https://docs.getdbt.com/guides/migration/versions)." + +## How to browse the API + We also provide a [graphical explorer](https://metadata.cloud.getdbt.com/graphiql) for this API where you can run ad-hoc queries or browse the schema. As GraphQL provides a self-describing API, the schema shown in the GraphiQL interface is an accurate representation of the graph and fields available to query on at any point in time. -The metadata API is available to accounts on the _Team_ and _Enterprise_ plans, for any version >= dbt v0.19.0. Please note that artifacts generated with a version of dbt _less than_ v0.19.0 will not be accessible via the dbt Cloud Metadata API. +## Retention limits for data + +You can use the metadata API to query data from the previous 3 months. For example, if today was April 1, you could query data back to January 1st. *We are continuously expanding the capabilities of the metadata API and we welcome your feedback and suggestions at metadata@dbtlabs.com.* diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-querying.md b/website/docs/docs/dbt-cloud-apis/metadata-querying.md similarity index 82% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-querying.md rename to website/docs/docs/dbt-cloud-apis/metadata-querying.md index e90a13a11bc..297d76e58e1 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-querying.md +++ b/website/docs/docs/dbt-cloud-apis/metadata-querying.md @@ -1,12 +1,12 @@ --- -title: "Querying the Metadata API" +title: "Query the Metadata API" id: "metadata-querying" --- -Accounts on the _Team_ and _Enterprise_ plans can query the dbt Metadata API. +Accounts on the _Team_ and _Multi-Tenant Enterprise_ plans can query the dbt Metadata API. ## Authorization -Currently, authorization of requests takes place [using a service token](https://docs.getdbt.com/docs/dbt-cloud/dbt-cloud-api/service-tokens). dbt Cloud admin users can generate a Metadata Only service token that is authorized to execute a specific query against the Metadata API. 
+Currently, authorization of requests takes place [using a service token](https://docs.getdbt.com/docs/dbt-cloud-apis/service-tokens). dbt Cloud admin users can generate a Metadata Only service token that is authorized to execute a specific query against the Metadata API. Once you've created a token, you can use it in the Authorization header of requests to the dbt Cloud Metadata API. Be sure to include the Token prefix in the Authorization header, or the request will fail with a `401 Unauthorized` error. Note that `Bearer` can be used in place of `Token` in the Authorization header. Both syntaxes are equivalent. @@ -30,7 +30,7 @@ The `` body should be a JSON string in the format: Every query will rely on a *jobID*. You can get the jobID by clicking into the relevant job in dbt Cloud and observing the URL. In this example URL, the jobID would be 917: `https://cloud.getdbt.com/#/accounts/1/projects/665/jobs/917/` -There are several illustrative example queries in this documentation (examples of queries on the Model node, [here](/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-model). +There are several illustrative example queries in this documentation (examples of queries on the Model node, [here](/docs/dbt-cloud-apis/metadata-schema-model)). ## GraphiQL You can also experiment and run queries directly in the [GraphiQL interface](https://metadata.cloud.getdbt.com/graphiql), which is convenient for exploration. On the right hand side, there is a document explorer where you can see all possible nodes and fields. Below is an example of what a query looks like in GraphiQL. Note that you must authenticate via bearer auth with your token. diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-exposure.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-exposure.mdx similarity index 97% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-exposure.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-exposure.mdx index 95991a19304..5297cc7da53 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-exposure.mdx +++ b/website/docs/docs/dbt-cloud-apis/schema-metadata-exposure.mdx @@ -5,7 +5,7 @@ id: "metadata-schema-exposure" import { ArgsTable, SchemaTable } from "./schema"; -The exposure object allows you to query information about a particular exposure. You can learn more about exposures [here](/docs/building-a-dbt-project/exposures). +The exposure object allows you to query information about a particular exposure. You can learn more about exposures [here](/docs/build/exposures). ### Arguments diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-exposures.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-exposures.mdx similarity index 96% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-exposures.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-exposures.mdx index e4d9f54f53e..54ec4bbc905 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-exposures.mdx +++ b/website/docs/docs/dbt-cloud-apis/schema-metadata-exposures.mdx @@ -5,7 +5,7 @@ id: "metadata-schema-exposures" import { ArgsTable, SchemaTable } from "./schema"; -The exposures object allows you to query information about all exposures in a given job. You can learn more about exposures [here](/docs/building-a-dbt-project/exposures).
+The exposures object allows you to query information about all exposures in a given job. You can learn more about exposures [here](/docs/build/exposures). ### Arguments diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-metric.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-metric.mdx similarity index 89% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-metric.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-metric.mdx index 1b7d371ffa3..46a1e79abba 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-metric.mdx +++ b/website/docs/docs/dbt-cloud-apis/schema-metadata-metric.mdx @@ -18,7 +18,7 @@ Below we show some illustrative example queries and outline the schema (all poss ### Example Queries #### Metric information -The example query below outputs information about a metric. Note that you can also add any field from the Model endpoint -- here we are simply selecting name. This includes schema, database, uniqueId, columns and more -- find documentation [here](https://docs.getdbt.com/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-model). +The example query below outputs information about a metric. Note that you can also add any field from the Model endpoint -- here we are simply selecting name. This includes schema, database, uniqueId, columns and more -- find documentation [here](/docs/dbt-cloud-apis/metadata-schema-model). ```graphql diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-metrics.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-metrics.mdx similarity index 83% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-metrics.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-metrics.mdx index 48a882318c2..b9077e228bd 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-metrics.mdx +++ b/website/docs/docs/dbt-cloud-apis/schema-metadata-metrics.mdx @@ -51,7 +51,7 @@ The example query returns information about all metrics in this job. ``` ### Fields -metrics has access to the *same fields* as the [metric node](/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metric). The difference is that metrics can output a list, so instead of querying for fields for one specific metric, you can query for those parameters for all metrics in a run. +metrics has access to the *same fields* as the [metric node](/docs/dbt-cloud-apis/metadata-schema-metric). The difference is that metrics can output a list, so instead of querying for fields for one specific metric, you can query for those parameters for all metrics in a run. 
When querying for `metrics`, the following fields are available: diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-model.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-model.mdx similarity index 77% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-model.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-model.mdx index 19c2f051c6d..9643cc84399 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-model.mdx +++ b/website/docs/docs/dbt-cloud-apis/schema-metadata-model.mdx @@ -56,6 +56,27 @@ The example query below could be useful if we wanted to understand information a } ``` +#### Column-level information + +You can use the following example query to understand more about the columns of a given model. Note that this will only work if the job has generated documentation. For example, it will work with the command `dbt docs generate`. + +```graphql +{ + model(jobId: 123, uniqueId: "model.jaffle_shop.dim_user") { + columns { + name + index + type + comment + description + tags + meta + } + } +} +``` + + ### Fields When querying for a `model`, the following fields are available: diff --git a/website/docs/docs/dbt-cloud-apis/schema-metadata-modelByEnv.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-modelByEnv.mdx new file mode 100644 index 00000000000..657987039f0 --- /dev/null +++ b/website/docs/docs/dbt-cloud-apis/schema-metadata-modelByEnv.mdx @@ -0,0 +1,44 @@ +--- +title: "Model By Environment" +id: "metadata-schema-modelByEnv" +--- + +import { ArgsTable, SchemaTable } from "./schema"; + + + +The model by environment object allows you to query information about a particular model based on `environmentId`. + +The [example query](#example-query) illustrates a few fields you can query in this `modelByEnvironment` object. Refer to [Fields](#fields) to see the entire schema, which provides all possible fields you can query. + +### Arguments + +When querying for `modelByEnvironment`, you can use the following arguments. + + + + +### Example Query + +You can use the `environment_id` and `model_unique_id` to return the model and its execution time for the last 10 times it was run, regardless of which job ran it! + +```graphql +{ + modelByEnvironment( + environmentId: 1, + uniqueId:"model.jaffle_shop.dim_user", + lastRunCount:10, + withCatalog: false + ){ + uniqueId + jobId, + runId, + executionTime + } +} +``` + +### Fields +When querying for `modelByEnvironment`, you can use the following fields. + + diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-models.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-models.mdx similarity index 85% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-models.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-models.mdx index 802e5d03a24..00bef5e1197 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-models.mdx +++ b/website/docs/docs/dbt-cloud-apis/schema-metadata-models.mdx @@ -46,7 +46,7 @@ The example query below finds all models in this schema, and their respective ex ### Fields -Models has access to the *same fields* as the [Model node](/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-model). The difference is that Models can output a list, so instead of querying for fields for one specific model, you can query for those parameters for all models within a jobID, database, etc.
+Models has access to the *same fields* as the [Model node](/docs/dbt-cloud-apis/metadata-schema-model). The difference is that Models can output a list, so instead of querying for fields for one specific model, you can query for those parameters for all models within a jobID, database, etc. When querying for `models`, the following fields are available: diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-seed.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-seed.mdx similarity index 100% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-seed.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-seed.mdx diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-seeds.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-seeds.mdx similarity index 100% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-seeds.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-seeds.mdx diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-snapshots.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-snapshots.mdx similarity index 82% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-snapshots.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-snapshots.mdx index 7d061ff0e0c..19b94c1462b 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-snapshots.mdx +++ b/website/docs/docs/dbt-cloud-apis/schema-metadata-snapshots.mdx @@ -38,7 +38,7 @@ The example query returns information about all snapshots in this job. ``` ### Fields -Snapshots has access to the *same fields* as the [Snapshot node](/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-snapshots). The difference is that Snapshots can output a list, so instead of querying for fields for one specific snapshot, you can query for those parameters for all snapshots within a jobID, database, etc. +Snapshots has access to the *same fields* as the [Snapshot node](/docs/dbt-cloud-apis/metadata-schema-snapshots). The difference is that Snapshots can output a list, so instead of querying for fields for one specific snapshot, you can query for those parameters for all snapshots within a jobID, database, etc. 
When querying for `snapshots`, the following fields are available: diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-source.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-source.mdx similarity index 100% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-source.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-source.mdx diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-sources.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-sources.mdx similarity index 85% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-sources.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-sources.mdx index 3c851ba25cf..e124e621baa 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-sources.mdx +++ b/website/docs/docs/dbt-cloud-apis/schema-metadata-sources.mdx @@ -45,7 +45,7 @@ The example query below finds all sources in this schema, and their respective s ``` ### Fields -Sources has access to the *same fields* as the [Source node](/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-source). The difference is that Sources can output a list, so instead of querying for fields for one specific source, you can query for those parameters for all sources within a jobID, database, etc. +Sources has access to the *same fields* as the [Source node](/docs/dbt-cloud-apis/metadata-schema-source). The difference is that Sources can output a list, so instead of querying for fields for one specific source, you can query for those parameters for all sources within a jobID, database, etc. When querying for `sources`, the following fields are available: diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-test.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-test.mdx similarity index 100% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-test.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-test.mdx diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-tests.mdx b/website/docs/docs/dbt-cloud-apis/schema-metadata-tests.mdx similarity index 100% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema-metadata-tests.mdx rename to website/docs/docs/dbt-cloud-apis/schema-metadata-tests.mdx diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema.jsx b/website/docs/docs/dbt-cloud-apis/schema.jsx similarity index 100% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/metadata/schema/schema.jsx rename to website/docs/docs/dbt-cloud-apis/schema.jsx diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/service-tokens.md b/website/docs/docs/dbt-cloud-apis/service-tokens.md similarity index 72% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/service-tokens.md rename to website/docs/docs/dbt-cloud-apis/service-tokens.md index b91fa9511fb..f4d3191756a 100644 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/service-tokens.md +++ b/website/docs/docs/dbt-cloud-apis/service-tokens.md @@ -13,7 +13,7 @@ You can use service account tokens for system-level integrations that do not run * Enterprise plans can apply any permission sets available to service tokens. * Team plans can apply Account Admin, Member, Job Admin, Read-Only, and Metadata permissions sets to service tokens. -You can assign as many permission sets as needed to one token. 
For more on permissions sets, see "[Enterprise Permissions](docs/dbt-cloud/access-control/enterprise-permissions)." +You can assign as many permission sets as needed to one token. For more on permission sets, see "[Enterprise Permissions](/docs/collaborate/manage-access/enterprise-permissions)." ## Generating service account tokens @@ -34,7 +34,7 @@ You can assign service account tokens any permission set available in dbt Cloud. The following permissions can be assigned to a service account token on a Team plan. **Account Admin**␊
-Account Admin service tokens have full `read + write` access to an account, so please use them with caution. A Team plan refers to this permission set as an "Owner role." For more on these permissions, see [Account Viewer](docs/dbt-cloud/access-control/enterprise-permissions#account-admin). +Account Admin service tokens have full `read + write` access to an account, so please use them with caution. A Team plan refers to this permission set as an "Owner role." For more on these permissions, see [Account Admin](/docs/collaborate/manage-access/enterprise-permissions#account-admin). **Metadata Only**
Metadata only service tokens can authorize requests to the metadata API. @@ -43,47 +43,47 @@ Metadata only service tokens can authorize requests to the metadata API. Job admin service tokens can authorize requests for viewing, editing, and creating environments, triggering runs, and viewing historical runs. **Member**
-Member service tokens can authorize requests for viewing and editing resources, triggering runs, and inviting members to the account. Tokens assigned the Member permission set will have the same permissions as a Member user. For more information about Member users, see "[Self-service permissions](/dbt-cloud/access-control/self-service-permissions)". +Member service tokens can authorize requests for viewing and editing resources, triggering runs, and inviting members to the account. Tokens assigned the Member permission set will have the same permissions as a Member user. For more information about Member users, see "[Self-service permissions](/docs/collaborate/manage-access/self-service-permissions)". **Read-only**
Read-only service tokens can authorize requests for viewing a read-only dashboard, viewing generated documentation, and viewing source freshness reports. ### Enterprise plans using service account tokens -The following permissions can be assigned to a service account token on an Enterprise plan. For more details about these permissions, see "[Enterprise permissions](/docs/dbt-cloud/access-control/enterprise-permissions)." +The following permissions can be assigned to a service account token on an Enterprise plan. For more details about these permissions, see "[Enterprise permissions](/docs/collaborate/manage-access/enterprise-permissions)." **Account Admin**
-Account Admin service tokens have full `read + write` access to an account, so please use them with caution. For more on these permissions, see [Account Viewer](docs/dbt-cloud/access-control/enterprise-permissions#account-admin). +Account Admin service tokens have full `read + write` access to an account, so please use them with caution. For more on these permissions, see [Account Admin](/docs/collaborate/manage-access/enterprise-permissions#account-admin). **Metadata Only**␊
Metadata only service tokens can authorize requests to the metadata API. **Job Admin**
-Job Admin service tokens can authorize request for viewing, editing, and creating environments, triggering runs, and viewing historical runs. For more on these permissions, see [Account Viewer](docs/dbt-cloud/access-control/enterprise-permissions#job-admin). +Job Admin service tokens can authorize requests for viewing, editing, and creating environments, triggering runs, and viewing historical runs. For more on these permissions, see [Job Admin](/docs/collaborate/manage-access/enterprise-permissions#job-admin). **Account Viewer**␊
-Account Viewer service tokens have read only access to dbt Cloud accounts. For more on these permissions, see [Account Viewer](docs/dbt-cloud/access-control/enterprise-permissions#account-viewer) on the Enterprise Permissions page. +Account Viewer service tokens have read-only access to dbt Cloud accounts. For more on these permissions, see [Account Viewer](/docs/collaborate/manage-access/enterprise-permissions#account-viewer) on the Enterprise Permissions page. **Admin**␊
-Admin service tokens have unrestricted access to projects in dbt Cloud accounts. You have the option to grant that permission all projects in the account or grant the permission only on specific projects. For more on these permissions, see [Admin Service](docs/dbt-cloud/access-control/enterprise-permissions#admin-service) on the Enterprise Permissions page. +Admin service tokens have unrestricted access to projects in dbt Cloud accounts. You have the option to grant that permission to all projects in the account or grant the permission only on specific projects. For more on these permissions, see [Admin Service](/docs/collaborate/manage-access/enterprise-permissions#admin-service) on the Enterprise Permissions page. **Git Admin**␊
-Git admin service tokens have all the permissions listed in [Git admin](/docs/dbt-cloud/access-control/enterprise-permissions#git-admin) on the Enterprise Permissions page. +Git admin service tokens have all the permissions listed in [Git admin](/docs/collaborate/manage-access/enterprise-permissions#git-admin) on the Enterprise Permissions page. **Database Admin**␊
-Database admin service tokens have all the permissions listed in [Database admin](/docs/dbt-cloud/access-control/enterprise-permissions#database-admin) on the Enterprise Permissions page. +Database admin service tokens have all the permissions listed in [Database admin](/docs/collaborate/manage-access/enterprise-permissions#database-admin) on the Enterprise Permissions page. **Team Admin**
-Team admin service tokens have all the permissions listed in [Team admin](/docs/dbt-cloud/access-control/enterprise-permissions#team-admin) on the Enterprise Permissions page. +Team admin service tokens have all the permissions listed in [Team admin](/docs/collaborate/manage-access/enterprise-permissions#team-admin) on the Enterprise Permissions page. **Job Viewer**
-Job viewer admin service tokens have all the permissions listed in [Job viewer](/docs/dbt-cloud/access-control/enterprise-permissions#job-viewer) on the Enterprise Permissions page. +Job viewer service tokens have all the permissions listed in [Job viewer](/docs/collaborate/manage-access/enterprise-permissions#job-viewer) on the Enterprise Permissions page. **Developer**␊
-Developer service tokens have all the permissions listed in [Developer](/docs/dbt-cloud/access-control/enterprise-permissions#developer) on the Enterprise Permissions page. +Developer service tokens have all the permissions listed in [Developer](/docs/collaborate/manage-access/enterprise-permissions#developer) on the Enterprise Permissions page. **Analyst**
-Analyst admin service tokens have all the permissions listed in [Analyst](/docs/dbt-cloud/access-control/enterprise-permissions#analyst) on the Enterprise Permissions page. +Analyst service tokens have all the permissions listed in [Analyst](/docs/collaborate/manage-access/enterprise-permissions#analyst) on the Enterprise Permissions page. **Stakeholder**␊
-Stakeholder service tokens have all the permissions listed in [Stakeholder](/docs/dbt-cloud/access-control/enterprise-permissions#stakeholder) on the Enterprise Permissions page. +Stakeholder service tokens have all the permissions listed in [Stakeholder](/docs/collaborate/manage-access/enterprise-permissions#stakeholder) on the Enterprise Permissions page. diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/user-tokens.md b/website/docs/docs/dbt-cloud-apis/user-tokens.md similarity index 100% rename from website/docs/docs/dbt-cloud/dbt-cloud-api/user-tokens.md rename to website/docs/docs/dbt-cloud-apis/user-tokens.md diff --git a/website/docs/docs/dbt-cloud/access-control/self-service-permissions.md b/website/docs/docs/dbt-cloud/access-control/self-service-permissions.md deleted file mode 100644 index e9a89281a28..00000000000 --- a/website/docs/docs/dbt-cloud/access-control/self-service-permissions.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -title: "Self-Service Permissions" -id: "self-service-permissions" ---- - -## Overview - -dbt Cloud supports two different permission sets to manage permissions for self-service accounts: **Member** and **Owner**. - -The permissions afforded to each role are described below: - -| Action | Member | Owner | -| ------ | ------ | ----- | -| View and edit resources | ✅ | ✅ | -| Trigger runs | ✅ | ✅ | -| Access the IDE | ✅ | ✅ | -| Invite Members to the account | ✅ | ✅ | -| Manage billing | ❌ | ✅ | -| Manage team permissions | ❌ | ✅ | -| Invite Owners to the account | ❌ | ✅ | - -## Read Only vs. Developer License Types - -Users configured with Read Only license types will experience a restricted set of permissions in dbt Cloud. If a user is associated with a _Member_ permission set and a Read Only seat license, then they will only have access to what a Read-Only seat allows. See [Seats and Users](cloud-seats-and-users) for more information on the impact of licenses on these permissions. - -## Owner and Member Groups in dbt Cloud Enterprise - -By default, new users are added to the Member and Owner groups when they onboard to a new dbt Cloud. Member and Owner groups are included with every new dbt Cloud account. They're used to provide access for Administrators to add additional users, groups and apply permission sets. This is necessary for account onboarding but can create confusion when initially setting up SSO and RBAC for dbt Cloud Enterprise accounts as described in the [Enterprise Permissions](enterprise-permissions) guide. Owner and Member groups are **account level** groups, so their permissions override any project-level permissions you wish to apply. - -After onboarding Administrative users and configuring RBAC/SSO groups, we recommend the following steps for onboarding users to a dbt Cloud Enterprise account. - -### Create Account Admins Group - -**Important:** Do this BEFORE proceeding to the next section - -1) Create an Account Admins group -2) Assign at least one user to the Account Admins - -The assigned user will manage future group, SSO mapping and user / group assignment. - -### Remove the Owner and Member groups - -Follow the steps below for each of the Owner and Member groups in turn: - -1) Click the Hamburger icon at the top left of the Cloud Console, then Account Settings >> Groups >> -2) Click "Edit" in the upper right corner - - - -3) Scroll to the bottom of the group page -4) Click "Delete" - -From here, the Account Admin can add additional SSO mapping groups, permission sets and users as needed. 
diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud.md b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud.md deleted file mode 100644 index abb432d11d1..00000000000 --- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -title: "Setup" -id: "cloud-configuring-dbt-cloud" ---- - -The following sections will help you set up your dbt Cloud account: - -- [Connecting your database](connecting-your-database) -- [Using a dbt Cloud managed repository](cloud-using-a-managed-repository) -- [Connecting your GitHub Account](cloud-installing-the-github-application) -- [Importing a project by git URL](cloud-import-a-project-by-git-url) -- [Access Control](/docs/dbt-cloud/access-control/access-control-overview) -- [Self-Service Permissions](/docs/dbt-cloud/access-control/self-service-permissions) -- [Choosing a dbt version](cloud-choosing-a-dbt-version) diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure.md b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure.md deleted file mode 100644 index 864f258e0c0..00000000000 --- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure.md +++ /dev/null @@ -1,25 +0,0 @@ ---- -title: "Authenticate with Azure DevOps" -id: "authenticate-azure" -description: "dbt Cloud developers need to authenticate with Azure DevOps." -sidebar_label: "Authenticate with Azure DevOps" ---- - -If you are a dbt Cloud developer who contributes code to the dbt Cloud IDE, and you use Azure DevOps for collaborating on code and building and deploying apps, you need to [link your dbt Cloud profile to Azure DevOps](#link-your-dbt-cloud-profile-to-azure-devops). Linking these two profiles provides access to your Azure DevOps profile from your dbt Cloud profile by authenticating. - -:::info Beta feature -This feature is currently in Beta. If you are interested in getting access to the beta, please reach out to support@getdbt.com -::: - -## Link your dbt Cloud profile to Azure DevOps - -Connect your dbt Cloud profile to Azure DevOps using OAuth: - -1. From your profile page, click **Integrations** in the left pane. -2. Click **Link your Azure DevOps Profile**. -3. Once you're redirected to Azure DevOps, sign into your account. -4. When you see the permission request screen from Azure DevOps App, click **Accept**. - - - -You will be directed back to dbt Cloud, and your profile should be linked. You are now ready for developing in the IDE! diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version.md b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version.md index 2504c36b75b..c4a7e39d91c 100644 --- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version.md +++ b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version.md @@ -7,9 +7,9 @@ In dbt Cloud, both jobs and environments are configured to use a specific versio ### Environments -Navigate to the settings page of an environment, then click **edit**. Click the 'dbt version' dropdown bar and make your selection. From this list, you can select an available version of Core to associate with this environment. +Navigate to the settings page of an environment, then click **edit**. Click the **dbt Version** dropdown bar and make your selection. From this list, you can select an available version of Core to associate with this environment. 
- + Be sure to save your changes before navigating away. @@ -17,21 +17,27 @@ Be sure to save your changes before navigating away. Each job in dbt Cloud can be configured to inherit parameters from the environment it belongs to. - + -The example job seen in the screenshot above belongs to the environment "Redshift - Maintenance." It inherits the dbt version of its environment using the `inherit from ` option. You may also manually override the dbt version of a specific job to be any of the current Core releases supported by Cloud. +The example job seen in the screenshot above belongs to the environment "Prod". It inherits the dbt version of its environment as shown by the **Inherited from ENVIRONMENT_NAME (DBT_VERSION)** selection. You may also manually override the dbt version of a specific job to be any of the current Core releases supported by Cloud by selecting another option from the dropdown. ## Supported Versions -We have always encouraged our customers to upgrade dbt Core versions whenever a new minor version is released. We released our first major version of dbt - `dbt v1.0` - in December 2021. Alongside this release, we updated our policy on which versions of dbt Core we will support running in dbt Cloud. +We have always encouraged our customers to upgrade dbt Core versions whenever a new minor version is released. We released our first major version of dbt - `dbt 1.0` - in December 2021. Alongside this release, we updated our policy on which versions of dbt Core we will support in dbt Cloud. - > **By June 30, 2022, all dbt projects in Cloud must be running v1.0 or later. Starting with v1.0, any subsequent minor versions will be allowed to run in Cloud for 1 year post release.** -We will continue to update this table so that customers know when we plan to stop running different versions of Core in Cloud. + > **Starting with v1.0, any subsequent minor versions will be supported in dbt Cloud for 1 year post release. At the end of the 1 year window, accounts must upgrade to a supported version of dbt or risk service disruption.** + +We will continue to update this table so that customers know when we plan to stop supporting different versions of Core in dbt Cloud. -Starting in v1.0, dbt Cloud will ensure that you're always using the latest compatible patch release of `dbt-core` and plugins, including all the latest fixes. You may choose to try prereleases of those patch releases before they are generally available. + +:::warning ⚠️ v0.X Non-Supported Period + Accounts had until the end of June 2022 to upgrade to dbt 1.0 or later. Pre-dbt 1.0 versions will no longer receive patch fixes, and our support team will no longer assist with dbt version specific help on non-supported versions of dbt. Additionally, jobs running dbt versions prior to 1.0 may experience service disruptions before the end of the year and may be removed from the dbt Cloud context by year end. You will receive additional notification before any planned disruption to your production jobs. +::: + +Starting in v1.0, dbt Cloud will ensure that you're always using the latest compatible patch release of `dbt-core` and plugins, including all the latest fixes. You may also choose to try prereleases of those patch releases before they are generally available. -For more on version support and future releases, see "[Understanding dbt Core versions](core-versions)." +For more on version support and future releases, see [Understanding dbt Core versions](core-versions). -#### What will actually happen on the deprecation date? 
+#### What will actually happen on the end of support date? -- On July 1, 2022, we will only run jobs that are on dbt v1.0 or later. Customers must upgrade their projects to be compatible with dbt v1.0 or later. -- 1 year post a minor version release of v1.X, we will try to run our customers' projects on the latest release of dbt if they have not already upgraded their projects themselves. In a post dbt v1.0 world, there won't be breaking changes between minor versions of dbt, so we might be reasonably successful at upgrading our customers' versions automatically. However, our strong preference is for customers to try to manage the upgrade process themselves which is a more cautious way to prevent failures to their production pipelines. - -We will give customers consistent communication that they're hitting the end of their supported window, so they can plan accordingly. +One year after a minor version release of v1.X, we will try to run our users' projects on the latest release of dbt if they have not already upgraded their projects themselves. In a post dbt v1.0 world, there won't be breaking changes between minor versions of dbt, so we might be reasonably successful at upgrading our users' versions for them. However, our strong preference is for accounts to try to manage the upgrade process themselves, which is a more cautious way to prevent failures in their production pipelines. We will give accounts consistent communication that they're hitting the end of their supported window, so they can plan accordingly. #### What should you be doing today? You should **upgrade to v1.0 as soon as you can** - and we recommend that you proceed **slowly and steadily**. -Why? Because attempting to upgrade 6 minor versions at one time (v0.15.0 —> v0.21.0) implies 6x the potential for breaking changes, versus upgrading a single minor version. +Why? Because attempting to upgrade 6 minor versions at one time (v0.15.0 —> v0.21.0) implies 6x the potential for breaking changes, versus upgrading a single minor version. Refactoring code is much easier when you're updating a well-defined, constrained surface area. Doing things incrementally is the way to go. Additionally, upgrading to more recent versions of dbt Core will enable better performance and more features in dbt Cloud. Below is a compatibility matrix between dbt versions and dbt Cloud features. Hopefully this provides more motivation to always update your environments and jobs to run the latest version of dbt. - - | dbt Cloud Feature | dbt Core Version Needed | | ------------- | -------------- | -| [Environment variable secret scrubbing](/docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables#handling-secrets)| v1.0+ | +| [Environment variable secret scrubbing](/docs/build/environment-variables#handling-secrets)| v1.0+ | | DAG in the IDE | v0.20.0+| -| [Metadata API](/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview.md) |v0.19.0+| -| [Dashboard status tiles](/docs/dbt-cloud/using-dbt-cloud/cloud-dashboard-status-tiles) | v0.19.0+ | -| [Slim CI](/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github#slim-ci) | v0.18.0+ | +| [Metadata API](/docs/dbt-cloud-apis/metadata-api) |v0.19.0+| +| [Dashboard status tiles](/docs/deploy/dashboard-status-tiles) | v0.19.0+ | +| [Slim CI](/docs/deploy/cloud-ci-job) | v0.18.0+ | #### Need help upgrading? ␊
-If you want more advice on how to upgrade your dbt projects, check out our [migration guides](docs/guides/migration-guide/upgrading-to-0-21-0) and our [upgrading Q&A page](docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions). +If you want more advice on how to upgrade your dbt projects, check out our [migration guides](/guides/migration/versions/) and our [upgrading Q&A page](/docs/dbt-versions/upgrade-core-in-cloud). diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application.md b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application.md deleted file mode 100644 index 10667856b9b..00000000000 --- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -title: "Connecting your GitHub Account" -id: "cloud-installing-the-github-application" -sidebar_label: "Connecting GitHub" ---- - -## Overview - -Connecting your GitHub account to dbt Cloud unlocks exciting and compelling functionality in dbt Cloud. Once your GitHub account is connected, you can: -- [Trigger CI builds](cloud-enabling-continuous-integration-with-github) when Pull Requests are opened in GitHub -- Log into dbt Cloud via OAuth through GitHub -- Add new repos to Cloud in a single click (no need to fuss with Deploy Keys) - - - -:::info Use GitHub On-Premise? -This method will not work for On-Premise GitHub deployments. Please reference our -[Importing a project by git url](docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url#github) to setup your connection. This alternative method does not include the benefits mentioned above. -::: - -## Linking dbt Cloud to your GitHub account - -To link your dbt Cloud account to your GitHub account, navigate to your [user profile](https://cloud.getdbt.com/#/profile/) and click the [Integrations section](https://cloud.getdbt.com/#/profile/integrations/). Here you can link your GitHub account to dbt Cloud. - - - -On the next page, you can select a GitHub organization to install the dbt Cloud application into. - - - -The dbt Cloud GitHub App requires the following permissions: -- Read access to metadata -- Read and write access to checks, code, commit statuses, pull requests, and workflows (new!) - -## Usage notes - -A GitHub organization owner needs to connect and configure the dbt Cloud app for their organization. Users on the Developer or Team plans do not need to each connect to GitHub, but it is recommended to do so. Users on the Enterprise plan must each connect their GitHub accounts, as dbt Cloud will enforce the repository's access permissions for every user in the IDE. - -## Connect your personal GitHub account - -To connect your personal GitHub account, navigate to your [User Profile](https://cloud.getdbt.com/#/profile/) select the [Integrations section](https://cloud.getdbt.com/#/profile/integrations/) from the left sidebar. Under “GitHub”, if your account is already connected, you’ll see "Your user account is linked to a GitHub account with username <your-github-username>." - -If your account is not connected, you’ll see "This account is not linked to a GitHub account.” Click the button to begin the setup process. You’ll be redirected to GitHub, and then back into dbt Cloud. When you are redirected to dbt Cloud, you should now see your connected account. The next time you log into dbt Cloud, you will be able to do so via OAuth through GitHub. 
diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-setting-up-bigquery-oauth.md b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-setting-up-bigquery-oauth.md
deleted file mode 100644
index 48349020dab..00000000000
--- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-setting-up-bigquery-oauth.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-title: "Setting up BigQuery OAuth"
----
-
-:::info Enterprise Feature
-
-This guide describes a feature of the dbt Cloud Enterprise plan. If you're interested in learning more about an Enterprise plan, contact us at sales@getdbt.com.
-
-:::
-
-dbt Cloud supports [OAuth authentication](https://cloud.google.com/bigquery/docs/authentication) with BigQuery. When BigQuery OAuth is enabled, users can interact with the BigQuery warehouse as individual users, rather than leveraging a shared service level authentication.
-
-:::info Some Pre-Work Required
-
-Before setting up a Client ID & Secret, you'll need to have your existing BigQuery settings in order. We recommend using a Service Account JSON file, and have a walkthrough for that [here](setting-up#generate-bigquery-credentials) - you will also need to set up an [OAuth Consent Screen](https://support.google.com/cloud/answer/6158849) if you haven't already!
-
-:::
-
-### Configuring a Client ID & Secret
-To enable BigQuery OAuth, you will need a Client ID & Secret for [authentication](https://cloud.google.com/bigquery/docs/authentication) with BigQuery to manage the OAuth connection between dbt Cloud and BigQuery.
-
-In the BigQuery console you'll want to navigate to the Credentials page:
-
-There you'll see your existing Keys, Client IDs, and Service Accounts - you'll want to click the "Create Credentials" button at the top and follow the steps, like this:
-
-For the fields, we recommend the following:
-
-| Config | Value |
-| ------ | ----- |
-| **Application type** | Web application |
-| **Name** | dbt Cloud |
-| **Authorized Javascript origins** | `https://cloud.getdbt.com` |
-| **Authorized Redirect URIs** | `https://cloud.getdbt.com/complete/bigquery` |
-
-:::info Deployment Nuance
-
-If you're using a deployment other than the standard dbt Cloud multi-tenant, you'll need to replace `cloud.getdbt.com` with the hostname of
-your cloud instance here!
-
-:::
-
-Then, click the blue Create button, which will display your Client ID and Client Secret, with handy clipboard buttons for copying into other screens - which is exactly what we're about to do. These values will continue to be available in your Credentials screen in perpetuity; *this is not the only chance you have to access them*.
-
-### Configure the Connection in dbt Cloud
-
-Back in dbt Cloud, you'll want to navigate to your Connection page for BigQuery. There you'll be able to click the Edit button in the top corner to enable writing into the OAuth 2.0 Settings boxes near the bottom.
-
-With Editing enabled, you can copy and paste the Client ID and the Client Secret you created in BigQuery into their respective boxes, then return to the top of the page to save your new OAuth Credentials.
diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions.md b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions.md deleted file mode 100644 index 59a7dbb3553..00000000000 --- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions.md +++ /dev/null @@ -1,243 +0,0 @@ ---- -title: "Upgrading dbt versions in Cloud" -id: "cloud-upgrading-dbt-versions" ---- - -This Q&A guide should help you figure out what changes you might need to make to successfully upgrade your version of dbt Core in dbt Cloud. As a reminder, we recommend everyone upgrade to the most recent version of dbt, as we will not support running all versions of dbt in Cloud indefinitely. We document which versions of dbt Core we support [here](docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version#supported-versions). - -There aren't many breaking changes between versions, and it may be the case that you don't need to change any code to upgrade to a newer version of dbt in dbt Cloud. You may only need to change the settings in your environment or job to call a more recent version of dbt - directions to do so can be found [here](/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version.md). - - -### How to run the latest version of dbt in Cloud - -#### Changes between minor versions of dbt that will affect your project - -Below we try to help you answer the question of whether a known breaking change between minor versions of dbt will affect your project. If you answer "yes" to any of the questions below, we recommend that you read the migration guides that we've put together for every dbt minor version release. - -:::info An Important Note on Packages - -If you use any packages from [dbt Hub](https://hub.getdbt.com/), make sure you also upgrade to a version of the package that supports the dbt version you intend to upgrade to. You can see which dbt versions a package supports by checking on the `require-dbt-version:` in the package's dbt_project.yml file on GitHub. - -As an example, dbt-utils version 0.7.6 supports dbt v0.20, v0.21, and v1.0, as described in its [dbt_project.yml](https://github.com/dbt-labs/dbt-utils/blob/0.7.6/dbt_project.yml). - -After you've changed the package version in your packages.yml file, be sure to run `dbt deps` in the IDE to install the updated version. - -::: - -
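For example, pinning the dbt-utils release mentioned above looks like this in packages.yml (a sketch; check each package's repository for the release that matches your target dbt version):

```yml
# packages.yml -- pin each package to a release that supports your dbt version
packages:
  - package: dbt-labs/dbt_utils
    version: 0.7.6  # supports dbt v0.20, v0.21, and v1.0 per its dbt_project.yml
```

After changing the pin, run `dbt deps` to install the updated version.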
- Upgrading to v1.0.latest from v0.21
- -:::info Universal change -Certain configurations in dbt_project.yml have been renamed -::: - -Existing projects will see non-breaking deprecation warnings. You can change three lines in most projects to remove the warnings: - - - -```yml -model-paths: ["models"] # formerly named "source-paths" -seed-paths: ["data"] # formerly named "data-paths" -clean-targets: - - "target" - - "dbt_packages" # formerly named "dbt_modules" -``` - - - -- Do you select tests using the old names for test types? (`test_type:schema`, `test_type:data`, `--schema`, `--data`) -- Do you have custom macro code that calls the (undocumented) global macros `column_list`, `column_list_for_create_table`, `incremental_upsert`? -- Do you have custom scripts that parse dbt JSON artifacts? -- (BigQuery only) Do you use dbt's legacy capabilities around ingestion-time-partitioned tables? - -If you believe your project might be affected, read more details in the migration guide [here](/docs/guides/migration-guide/upgrading-to-v1.0). - -
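If you select tests by type, note that v1.0 also renamed the test types themselves: `test_type:generic` and `test_type:singular` replaced the old `schema`/`data` terminology. Sketched here as a job-style command list (the `commands:` wrapper is only illustrative):

```yml
# Sketch: v1.0 selection syntax for what were previously "schema" and "data" tests
commands:
  - dbt test --select test_type:generic   # formerly test_type:schema / --schema
  - dbt test --select test_type:singular  # formerly test_type:data / --data
```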
- - -
- Upgrading to v0.21.latest from v0.20
-
-- Do you select specific sources to check freshness (`dbt snapshot-freshness --select <source_name>`)?
-- Do you have custom scripts that parse dbt JSON artifacts?
-- (Snowflake only) Do you have custom macros or materializations that depend on using transactions, such as statement blocks with `auto_begin=True`?
-
-If you believe your project might be affected, read more details in the migration guide [here](/docs/guides/migration-guide/upgrading-to-0-21-0).
-
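If you're not sure whether your project relies on source freshness at all, it's declared in your sources' YAML along these lines (a sketch; the source, table, and column names are made up):

```yml
# Sketch: a source with freshness rules, checked by `dbt snapshot-freshness`
version: 2

sources:
  - name: raw_app                    # illustrative source name
    loaded_at_field: _loaded_at_utc  # illustrative timestamp column
    freshness:
      warn_after: {count: 12, period: hour}
      error_after: {count: 24, period: hour}
    tables:
      - name: orders
```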
- - - -
- Upgrading to v0.20.latest from v0.19
- -- Does your project define any custom schema tests? -- Does your project use `adapter.dispatch` or the `spark_utils` package? -- Do you have custom scripts that parse dbt JSON artifacts? - -If you believe your project might be affected, read more details in the migration guide [here](/docs/guides/migration-guide/upgrading-to-0-20-0). - -
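For the `adapter.dispatch` and `spark_utils` question above, projects that customize dispatch behavior typically carry a config along these lines in dbt_project.yml (a sketch of the v0.20-era syntax; the package names are just the ones mentioned above):

```yml
# Sketch: search spark_utils before dbt_utils when dispatching dbt_utils macros
dispatch:
  - macro_namespace: dbt_utils
    search_order: ['spark_utils', 'dbt_utils']
```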
- - - -
- Upgrading to v0.19.latest from v0.18
- -:::info Important - -If you have not already, you must add `config-version: 2` to your dbt_project.yml file. -See **Upgrading to v0.17.latest from v0.16** below for more details. - -::: -
- - -- Do you have custom scripts that parse dbt JSON artifacts? -- Do you have any custom materializations? - -If you believe your project might be affected, read more details in the migration guide [here](/docs/guides/migration-guide/upgrading-to-0-19-0). - -
- - -
- Upgrading to v0.18.latest from v0.17
- -- Do you directly call `adapter_macro`? - -If you believe your project might be affected, read more details in the migration guide [here](/docs/guides/migration-guide/upgrading-to-0-18-0). - -
- - - -
- Upgrading to v0.17.latest from v0.16
- -:::info Universal change - -You must add `config-version: 2` to your dbt_project.yml file. -::: -
- - - -```yml -name: my_project -version: 1.0.0 - -config-version: 2 - -vars: - my_var: 1 - another_var: true - -models: - ... -``` - - - -
-

:::info Universal change

`vars:` are no longer defined inside your `models:` block; they are now a separate, top-level section of the dbt_project.yml file.
:::
- - - - -```yml -name: my_project -version: 1.0.0 - -config-version: 2 - -vars: - my_var: 1 - another_var: true - -models: - ... -``` - - - - -- Do you have dictionary configs in your dbt_project.yml such as `partition_by` or `persist_docs`? If yes, you need to add a preceding +. - - - -```yml - -models: - my_project: - reporting: - +partition_by: - field: date_day - data_type: timestamp -``` - - -If you believe your project might be affected, read more details in the migration guide [here](/docs/guides/migration-guide/upgrading-to-0-17-0). - -
- - -
- Upgrading to v0.16.latest from v0.15
- -- Do you use the custom `generate_schema_name` macro? -- Do you use `partition_by` config for BigQuery models? - -If you believe your project might be affected, read more details in the migration guide [here](/docs/guides/migration-guide/upgrading-to-0-16-0). -
- - -
- Upgrading to v0.15.latest from v0.14
- -- Do you have a custom materialization? -- Do you have a macro that accesses `Relations` directly? - -If you believe your project might be affected, read more details in the migration guide [here](/docs/guides/migration-guide/upgrading-to-0-15-0). -
- -
- Upgrading to v0.14.latest from v0.13
-
-- Do you still use `Archives`?
-- Do you use the custom `generate_schema_name` macro?
-- Do you use the `--non-destructive` flag?
-
-If you believe your project might be affected, read more details in the migration guide [here](/docs/guides/migration-guide/upgrading-to-0-14-0).
-
-
-#### Testing your changes before upgrading
-Once you have an idea about what code changes you'll need to make, you can start implementing them. We recommend that you create a separate dbt project, `Upgrade Project`, to test your changes before making them live in your main dbt project. In your `Upgrade Project`, connect to the same repository that you use for your main dbt project, but this time, set the development environment [settings](docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version) to run the latest version of dbt Core. Next, check out a branch `dbt-version-upgrade`, make the appropriate updates to your project (if needed), and see if your dbt project compiles and runs with the new version of dbt in the IDE. If jumping directly to the latest version of dbt is too far of a leap for your project, try iteratively getting your project to work on each successive minor version. There are years of development and a handful of breaking changes between two distant versions of dbt (e.g. 0.14 --> 0.20). There are far fewer between two subsequent versions of dbt, which is why upgrading regularly is important.
-
-Once you have your project compiling and running on the latest version of dbt in the development environment for your `dbt-version-upgrade` branch, try replicating one of your production jobs to run off your branch's code. You can do this by creating a new deployment environment for testing, setting the custom branch to 'ON' and referencing your `dbt-version-upgrade` branch. You'll also need to set the dbt version in this environment to the latest dbt Core version.
-
-Then add a job to the new testing environment that replicates one of the production jobs your team relies on. If that job runs smoothly, you should be all set to merge your branch into main and change your development and deployment environments in your main dbt project to run off the newest version of dbt Core.
diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository.md b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository.md
deleted file mode 100644
index 25ae7bf8f8d..00000000000
--- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository.md
+++ /dev/null
@@ -1,15 +0,0 @@
----
-title: "Using a dbt Cloud managed repository"
-id: "cloud-using-a-managed-repository"
----
-
-If you do not already have a git repository for your dbt project, you can let dbt Cloud manage a repository for you. Managed repositories are a great way to trial dbt without needing to spin up a new repository.
-
-To create a managed repository, choose the "Managed" option when creating a repository in dbt Cloud. Next, supply the name for the repository -- something like "analytics" or "dbt-models" is a good choice.
-
-Once saved, dbt Cloud will host and manage this repository for you. If in the future you choose to host this repository yourself, you can contact support to have the contents of your repo transferred to you.
-
-**Note:** We do not recommend productionizing with a managed repository, because you will not be able to use git features like pull requests, which are part of our recommended version control best practices.
diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-azure-devops.md b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-azure-devops.md
deleted file mode 100644
index 7494598b632..00000000000
--- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-azure-devops.md
+++ /dev/null
@@ -1,27 +0,0 @@
----
-title: "Connecting your Azure DevOps Account"
-id: "connecting-azure-devops"
-sidebar_label: "Connecting Azure DevOps"
----
-
-:::info Beta feature
-This feature is currently in Beta. If you are interested in getting access to the beta, please reach out to support@getdbt.com.
-
-You can still use dbt Cloud with Azure DevOps before this feature is generally available by [following these instructions](/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url#azure-devops).
-:::
-
-## About Azure DevOps and dbt Cloud
-
-You can connect your Azure DevOps account and use Azure Active Directory (Azure AD) to enable identity and access management in dbt Cloud:
-
-- Import new Azure DevOps repos with a couple of clicks during dbt Cloud project setup.
-- Enforce user authorization with OAuth 2.0.
-- Carry Azure DevOps user repository permissions through to dbt Cloud IDE's git actions.
-- Trigger continuous integration (CI) builds when pull requests are opened in Azure DevOps. (Coming soon!)
-
-To connect Azure DevOps and use Azure AD in dbt Cloud:
-
-1. An account admin needs to [set up an Active Directory application and add it to dbt Cloud](docs/dbt-cloud/cloud-configuring-dbt-cloud/setup-azure).
-2. dbt Cloud developers need to [personally authenticate with Azure DevOps](docs/dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure) from dbt Cloud.
diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-gitlab.md b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-gitlab.md
deleted file mode 100644
index 8b98e956811..00000000000
--- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-gitlab.md
+++ /dev/null
@@ -1,77 +0,0 @@
----
-title: "Connecting your GitLab Account"
-id: "connecting-gitlab"
-sidebar_label: "Connecting GitLab"
----
-
-## Overview
-
-Connecting your GitLab account to dbt Cloud unlocks exciting and compelling functionality in dbt Cloud. Once your GitLab account is connected, you can:
-- [Trigger CI builds](cloud-enabling-continuous-integration-with-github) when Merge Requests are opened in GitLab.
-- Import new repos with one click.
-- Carry GitLab user permissions through to dbt Cloud IDE's git actions.
-
-## For Developer and Team tiers
-
-To connect your personal GitLab account, navigate to your [Profile](https://cloud.getdbt.com/#/profile/) and select [Integrations](https://cloud.getdbt.com/#/profile/integrations/) from the left sidebar.
-
-If your account is not connected, click the button `Link your GitLab account` to continue the setup process.
-
-You should be redirected to GitLab and prompted to sign into your account. GitLab will then ask for your explicit authorization:
-
-Once you've accepted, you should be redirected back to dbt Cloud, and you'll see that your account has been linked.
-
-Now that you've linked to your account from your Profile page, you should verify that your Project Settings page also shows your deploy token.
-
-## For Enterprise tier
-
-Before developers can personally authenticate in GitLab, account admins need to set up a GitLab application.
-
-Account admins should navigate to `Account Settings` and click on the `Integrations` tab.
-
-Admins can create a Group Owned Application in GitLab - [GitLab has a guide for that here](https://docs.gitlab.com/ee/integration/oauth_provider.html#group-owned-applications).
-
-In GitLab, when creating your Group Owned Application, input the following:
-- `Name` - We recommend `dbt Cloud`
-- `Redirect URI` - You should copy-paste this from your Account Integrations page in dbt Cloud, but it is likely `https://cloud.getdbt.com/complete/gitlab`
-- Check the `Confidential` box
-- Check the `api` Scope
-
-The application form in GitLab should look like this:
-
-Click `Save application` in GitLab, and GitLab will then generate an Application ID and Secret. You can copy-paste those values back into the form on your dbt Cloud Integrations page.
-
-Note that if you're using the regular consumer version of GitLab, you'll want to use the standard `https://gitlab.com` for your GitLab instance - if your organization is using a hosted version of GitLab, you'll want to use the hostname provided by your organization: `https://gitlab.yourgreatcompany.com/`.
-
-Once the form is complete in dbt Cloud, click the button `Connect to GitLab`.
-
-GitLab will then ask for your explicit authorization:
-
-And you're all set!
-
-Non-admins on Enterprise accounts can authenticate by going to `Profile` and then `Integrations` and clicking the `Link your GitLab account` button.
-
-## Troubleshooting
-
-### Errors importing a repository
-If you do not see your repository listed, double-check that:
-- Your repository is in a GitLab group you have access to. dbt Cloud will not read repos associated with a user.
-
-If you do see your repository listed but are unable to import the repository successfully, double-check that:
-- You are a maintainer on that repository (only users with maintainer permissions can set up repository connections).
diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/setup-azure.md b/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/setup-azure.md
deleted file mode 100644
index 7784bb2836c..00000000000
--- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/setup-azure.md
+++ /dev/null
@@ -1,116 +0,0 @@
----
-title: "Set up Azure DevOps"
-id: "setup-azure"
-description: "You can set up your Azure DevOps by creating an Azure AD app and adding it to dbt Cloud."
-sidebar_label: "Set up Azure DevOps"
----
-
-## Overview
-
-To use Azure Active Directory (Azure AD) for identity and access management in dbt Cloud, an account admin needs to set up an Azure AD app:
-
-1. [Register an Azure AD app](#register-an-azure-ad-app) in Azure DevOps.
-2. [Add permissions to your new app](#add-permissions-to-your-new-app).
-3. [Add another redirect URI](#add-another-redirect-URI).
-4. [Connect Azure DevOps to your new app](#connect-azure-devops-to-your-new-app).
-5. [Add your Azure AD app to dbt Cloud](#add-your-azure-ad-app-to-dbt-cloud).
-
-Once the Azure AD app is added to dbt Cloud, an account admin must also connect a service user via OAuth, which will be used to power headless actions in dbt Cloud such as scheduled runs and CI.
-1. [Connecting a Service User](#connecting-a-service-user).
-
-Once the Azure AD app is added to dbt Cloud and the service user is connected, dbt Cloud developers can personally authenticate in dbt Cloud from Azure DevOps.
For more on this, see [Authenticate with Azure DevOps](docs/dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure).
-
-:::info Beta feature
-This feature is currently in Beta. If you are interested in getting access to the beta, please reach out to support@getdbt.com.
-:::
-
-## Register an Azure AD app
-
-1. Sign into your Azure DevOps account and click **Azure AD**.
-2. Select **App registrations** in the left panel.
-3. Select **New registration**. The form for creating a new Active Directory app opens.
-4. Provide a name for your app. We recommend using "dbt Labs Azure DevOps App".
-5. Select **Accounts in any organizational directory (Any Azure AD directory - Multitenant)** as the Supported Account Types.
-6. Add a redirect URI by selecting **Web** and typing in `https://cloud.getdbt.com/complete/azure_active_directory`. If you have a custom dbt Cloud URL, be sure to use the appropriate domain.
-7. Click **Register**.
-
-Here's what your app should look like before registering it:
-
-## Add permissions to your new app
-
-Provide your new app access to Azure DevOps:
-
-1. Select **API permissions** in the left navigation panel.
-2. Remove the **Microsoft Graph / User Read** permission.
-3. Click **Add a permission**.
-4. Select **Azure DevOps**.
-5. Select the **user_impersonation** permission.
-
-## Add another redirect URI
-
-You also need to add another redirect URI to your Azure AD application. This redirect URI will be used to authenticate the service user for headless actions in deployment environments.
-
-1. Navigate to your Azure AD application.
-2. Select the link next to **Redirect URIs**.
-3. Click **Add URI** and add the URI, making sure to use the appropriate domain if you have a custom dbt Cloud URL:
-`https://cloud.getdbt.com/complete/azure_active_directory_service_user`
-4. Click **Save**.
-
-## Connect Azure DevOps to your new app
-
-Connect Azure DevOps to the Active Directory App you just created:
-
-1. From your Azure DevOps account, select **Organization settings** in the bottom left.
-2. Navigate to Azure Active Directory.
-3. Click **Connect directory**.
-4. Select the directory you want to connect.
-5. Click **Connect**.
-
-## Add your Azure AD app to dbt Cloud
-
-Once you connect your Azure AD app and Azure DevOps, you need to provide dbt Cloud information about the app:
-
-1. Navigate to your account settings in dbt Cloud.
-2. Select **Integrations**.
-3. Scroll to the Azure DevOps section.
-4. Complete the form:
- - **Azure DevOps Organization:** Must match the name of your Azure DevOps organization exactly.
- - **Application (client) ID:** Found in the Azure AD App.
- - **Client secrets:** You need to first create a client secret in the Azure AD App under "Client credentials." You are responsible for the Azure AD app secret expiration and rotation.
- - **Directory (tenant) ID:** Found in the Azure AD App.
-
-Your Azure AD app should now be added to your dbt Cloud Account. People on your team who want to develop in dbt Cloud's IDE can now personally [authorize Azure DevOps from their profiles](dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure).
-
-## Connecting a service user
-Azure DevOps' permissions are tightly coupled to a developer's identity. However, in dbt Cloud deployment environments, you should avoid tying runs to an individual's identity as that creates production problems if an individual loses access to a dbt project repository.
- -Instead, we recommend creating service account users in Azure DevOps to power headless actions in dbt Cloud deployment environments. This service user must have read access to all dbt repos across projects in the dbt Cloud account. dbt Cloud will refresh the OAuth access token regularly. - -:::info Azure DevOps admin must grant read access to the service user -This service user's permissions will also power which repos a team can select from during dbt project set up, so an Azure DevOps admin must grant read access to the service user before setting up a project in dbt Cloud. -::: - -To connect the service user: -1. An admin must first be signed into the service user's Azure DevOps account. -2. The admin should click **Link Azure Service User** in dbt Cloud. -3. The admin will be directed to Azure DevOps and must accept the Azure AD app's permissions. -4. Finally, the admin will be redirected to dbt Cloud, and the service user will be connected. - - -Once connected, dbt Cloud displays the email address of the service user so you know which user's permissions are enabling headless actions in deployment environments. To change which account is connected, disconnect the profile in dbt Cloud, sign into the alternative Azure DevOps service account, and re-link the account in dbt Cloud. diff --git a/website/docs/docs/dbt-cloud/cloud-ide/ide-beta.md b/website/docs/docs/dbt-cloud/cloud-ide/ide-beta.md new file mode 100644 index 00000000000..7e8fdc16c0b --- /dev/null +++ b/website/docs/docs/dbt-cloud/cloud-ide/ide-beta.md @@ -0,0 +1,221 @@ +--- +title: "Develop in the dbt Cloud IDE (beta)" +id: "ide-beta" +--- + + + +:::info Join our beta + +If you’d like to try the dbt Cloud IDE for multi-tenant instances, please [sign up](https://docs.google.com/forms/d/e/1FAIpQLSdlU65gqTZPyGAUc16SkxqTc50NO9vdq_KGx1Mjm_4FB_97FA/viewform) to join the beta. To learn more about the beta features, you can read this documentation. + +::: + +## Overview + +The dbt Cloud integrated development environment (IDE) is where you can build, test, run, and version control your dbt projects directly from your browser. The IDE is the fastest and most reliable way to deploy dbt, and provides a real-time editing and execution environment for your dbt project -- no command line use required. + +To develop in dbt Cloud IDE (beta), you need to meet these requirements: + + +- Your dbt project must be compatible with dbt v0.15.0. The dbt IDE is powered by the [dbt-rpc](reference/commands/rpc) which was overhauled in dbt v0.15.0. +- You must have a [Developer License](/docs/collaborate/manage-access/seats-and-users). +- Currently only multi-tenant instances of dbt Cloud can develop in the updated beta version of the Cloud IDE. Single-tenant instances will soon be able to opt into this Beta release. +- Your dbt repository (in dbt Cloud) must have `write` access enabled. See [Connecting your GitHub Account](/docs/collaborate/git/connect-github) and [Importing a project by git URL](/docs/collaborate/git/import-a-project-by-git-url) for detailed setup instructions. + + +The IDE is a single interface for building, testing, running, and version controlling dbt projects from your browser. Anyone can use the IDE, from new dbt developers to seasoned practitioners. + + +To use the dbt Cloud IDE, you need to log in with a dbt Cloud account and click **Develop** at the top of the page. + +You can refer to [Getting Started with dbt Cloud](/docs/get-started/getting-started/set-up-dbt-cloud) to quickly get set up and perform some key tasks. 
For more information, see the following articles:
+
+- [What is dbt?](docs/introduction#what-else-can-dbt-do)
+- [Building your first project](/docs/get-started/getting-started/building-your-first-project)
+- [dbt Learn courses](https://courses.getdbt.com/collections)
+- [Using Git](https://docs.github.com/en/github/getting-started-with-github/using-git)
+
+**Is there a cost to using the dbt Cloud IDE?**
+
+Not at all! You can use dbt Cloud when you sign up for the Free [Developer plan](https://www.getdbt.com/pricing/), which comes with one developer seat. If you'd like to access more features or have more developer seats, you can upgrade your account to the Team or Enterprise plan. See dbt [Pricing plans](https://www.getdbt.com/pricing/) for more details.
+
+**Can I be a contributor to dbt Cloud?**
+
+Anyone can contribute to the dbt project. And whether it's a dbt package, a plugin, dbt-core, or this documentation site, contributing to the open source code that supports the dbt ecosystem is a great way to level yourself up as a developer, and give back to the community. See [Contributing](/docs/contributing/oss-expectations) for details on what to expect when contributing to the dbt open source software (OSS).
+
+**What is the difference between developing on the dbt Cloud IDE and on the CLI?**
+
+There are two main ways to develop with dbt: using the web-based IDE in dbt Cloud or using the command-line interface (CLI) in dbt Core.
+
+- **dbt Cloud IDE** - dbt Cloud is a web-based application that allows you to develop dbt projects with the IDE, includes a purpose-built scheduler, and provides an easier way to share your dbt documentation with your team. The IDE is a faster and more reliable way to deploy your dbt models, and provides a real-time editing and execution environment for your dbt project.
+
+- **dbt Core CLI** - The CLI uses [dbt Core](docs/introduction), an [open-source](https://github.com/dbt-labs/dbt) software that's freely available. You can build your dbt project in a code editor, like Jetbrains or VSCode, and run dbt commands from the command line.
+
+**What type of support is provided with dbt Cloud?**
+
+The global dbt Support team is available to help dbt Cloud users by email or in-product live chat. Developer and Team accounts offer 24x5 support, while Enterprise customers have priority access and options for custom coverage.
+
+If you have project-related or modeling questions, you can use our dedicated [Community Forum](/community/forum) to get help as well.
+
+## dbt Cloud IDE features
+
+With dbt Cloud IDE, you can:
+
+- Write modular SQL models with `select` statements and the [`ref()`](/reference/dbt-jinja-functions/ref) function
+- Test every model before deploying it to production
+- Share the generated documentation of your models with all data stakeholders
+- Deploy safely using development environments, just as git-enabled version control enables collaboration and a return to previous states
+
+**Find and replace**
+
+Press Command-F or Ctrl-F to open the find and replace bar in the upper right corner of the current file in the IDE. The IDE highlights your search results in the current file and code outline. You can use the up and down arrows to see the match highlighted in the current file when there are multiple matches. To replace the text with something else, use the left arrow.
+
+**Search across files**
+
+You can quickly search over all files in your current project.
+To search, press Command-O or Ctrl-O to open the search bar, then type the text or file name you're looking for. You can view the results under the search text, which are grouped into files containing the match. You can click on a result to view it in the IDE.
+
+**Keyboard shortcuts**
+
+There are default keyboard shortcuts that can help make development more productive and easier for everyone. Press Fn-F1 to view a list of all of them.
+
+**Multiple selections**
+
+You can make multiple selections for quick and simultaneous edits. The commands below are a common way to add more cursors, letting you insert cursors below or above with ease.
+
+- Option-Command-Down arrow
+- Option-Command-Up arrow
+- Press Option and click on an area
+
+**File explorer**
+
+The File explorer on the left side of the IDE allows you to organize your project and manage your files and folders. Click the three dot menu associated with the file or folder to create, rename, or delete it.
+
+**Drag and drop**
+
+You can also drag and drop files located in the file explorer. Use the file breadcrumb on the top of the IDE for quick, linear navigation. You can access adjacent files in the same folder by right clicking on the breadcrumb file.
+
+**Organize tabs**
+
+You can move your tabs around to reorganize your work in the IDE. You can also right click on a tab to view and select a list of actions to take.
+
+## Development in the dbt Cloud IDE Beta
+
+With the IDE, you can compile dbt code into SQL and run it against your database directly. It leverages the open-source [dbt-rpc](/reference/commands/rpc) plugin to intelligently recompile only the changes in your project.
+
+The dbt Cloud IDE Beta brings the startup and interaction time for dbt project development down from minutes to seconds.
+
+In dbt, SQL files can contain Jinja, a lightweight templating language. Using Jinja in SQL provides a way to use control structures (e.g. `if` statements and `for` loops) in your queries. It also lets you share SQL code through `macros`.
+
+You can invoke dbt commands, compile Jinja into a query, preview data from the warehouse, visualize a directed acyclic graph (DAG), and more.
+
+**Hot and cold start**
+
+You can launch the dbt Cloud IDE from a cold start or a hot start.
+
+- **Cold start** -- The process of starting an IDE session for the first time. Cold starting the IDE can take about 30 seconds. Behind the scenes, dbt is provisioning a dedicated server for you to build a dbt project. After this step finishes, the IDE is ready for use. In the meantime, dbt is also starting up the dbt-rpc container to interact with dbt-core. You don't need to wait for this to finish before the IDE is ready for editing.
+
+- **Hot start** -- The process of resuming an existing IDE session (within 3 hours of the last activity). Hot starting is faster and takes less than 3 seconds to load. This is because the environment is already available and you're simply resuming your session.
+
+dbt Labs closely monitors these two start modes as key performance metrics to help ensure consistent and reliable experiences.
+
+**Work retention**
+
+You must save your work to avoid losing it. The dbt Cloud IDE needs an explicit action to save your changes. There are three ways your work is stored:
+
+- **Unsaved, local code** -- Any code you write is automatically available from your browser's storage.
You can see your changes but will lose them if you switch branches or browsers (another device or browser).
+- **Saved but uncommitted code** -- When you save a file, the data gets stored in your local storage (EFS storage). If you switch branches but don't _commit_ your saved changes, you will lose your changes.
+- **Committed code** -- Your git branch repository contains all your changes. You can check out other branches or switch browsers without losing your changes.
+
+**Run projects**
+
+You can also *build*, *run*, and *test* dbt projects directly in the dbt IDE using our 'Build' feature. You can use dbt's [rich model selection syntax](https://docs.getdbt.com/reference/node-selection/syntax) to [run dbt commands](https://docs.getdbt.com/reference/dbt-commands) directly within dbt Cloud (see the sketch at the end of this section).
+
+The IDE updates in real-time as models, tests, seeds, and operations are run. If a model or test fails, you can review the logs to find and fix the issue.
+
+**Lineage tab**
+
+The visual in the Lineage tab adds more context to your dependencies and directional flow.
+
+You get to see how models are used as building blocks from left to right to transform your data from crude or normalized raw sources into cleaned-up, modular derived pieces. The final outputs on the far right of the DAG are ready to be used by analysts in infinite combinations to help clients, customers, and organizations make better decisions.
+
+You can access files in the lineage tab by double clicking on a particular model.
+
+**Command bar + status**
+
+You can enter and run commands from the command bar at the bottom of the IDE. Use the [rich model selection syntax](/reference/node-selection/syntax) to [run dbt commands](/reference/dbt-commands) directly within dbt Cloud. You can also view the history, status, and logs of previous runs by clicking **History**.
+
+:::info Note
+
+For your convenience, dbt Cloud automatically includes '`dbt`' as a prefix to your command so you don't need to enter it. You can also type the '`dbt`' prefix in your command.
+:::
+
+The status icon on the lower right corner of the IDE gives you an indicator of the health of your project. You can identify errors by clicking on the status icon for more details or by clicking **Restart the IDE**.
+
+**Generating and viewing documentation**
+
+To generate your project's documentation (docs) in the IDE, enter `docs generate` or `dbt docs generate` in the command bar. This command generates the docs for your dbt project as it currently exists in development.
+
+After a successful run, you can view your [documentation](https://docs.getdbt.com/docs/building-a-dbt-project/documentation) for your dbt project in real time. You can inspect and verify what your project's documentation will look like before you deploy your changes to production.
+
+Click **View Docs** on top of the file explorer to see the latest version of your documentation rendered in a new browser window.
+
+## Version control basics
+
+A [version control](https://en.wikipedia.org/wiki/Version_control) system allows you and your teammates to work collaboratively, safely, and simultaneously on a single project. Version control helps you track all the code changes made in the dbt Cloud IDE.
+
+When you develop in the dbt Cloud IDE, you can leverage Git directly to version control your code from your browser. This means you can branch, commit, push, and pull code with a couple of clicks - no command line required!
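To make the selection syntax mentioned under **Run projects** concrete, here is a hypothetical pair of commands (the model name is made up; the trailing `+` selects the model plus everything downstream of it, and the `commands:` wrapper is only a job-style sketch):

```yml
# Sketch: commands you might enter in the command bar, written as a job-style list
commands:
  - dbt run --select my_model+
  - dbt test --select my_model+
```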
+You can create a separate branch to develop and make changes. The changes you make aren't merged into the main branch unless they successfully pass tests. This helps keep the code organized and improves productivity by making the development process smooth.
+
+:::info Note
+To use version control, make sure you are connected to a Git repository in the IDE.
+:::
+
+**Common git terms**
+
+| Name | Definition |
+| --- | --- |
+| Repository or repo | A repository is a directory that stores all the files, folders, and content needed for your project. You can think of this as an object database of the project, storing everything from the files themselves to the versions of those files, commits, and deletions. Repositories are not limited by user, and can be shared and copied. |
+| Branch | A branch is a parallel version of a repository. It is contained within the repository, but does not affect the primary or main branch, allowing you to work freely without disrupting the _live_ version. When you've made the changes you want to make, you can merge your branch back into the main branch to publish your changes. |
+| Checkout | The checkout command is used to create a new branch, change your current working branch to a different branch, or switch to a different version of a file from a different branch. |
+| Commit | A commit is a user's change to a file (or set of files). When you make a commit to save your work, Git creates a unique ID that allows you to keep a record of the specific changes committed, along with who made them and when. Commits usually contain a commit message, which is a brief description of what changes were made. |
+| Main | The primary, base branch of all repositories. All committed and accepted changes should be on the Main (or master) branch. In the dbt Cloud IDE, the "Main" branch will be read-only. This is because any changes/edits to code cannot and should not be made directly in the base branch. A new branch should be created in order to make any changes to your project. |
+| Merge | Merge takes the changes from one branch and adds them into another (usually main) branch. These commits are usually first requested via pull request before being merged by a maintainer. |
+| Pull Request | If someone has changed code on a separate branch of a project and wants it to be reviewed to add to the main branch, they can submit a pull request. Pull requests ask the repo maintainers to review the commits made, and then, if acceptable, merge the changes upstream. A pull happens when adding the changes to the main branch. |
+| Push | A push updates a remote branch with the commits made to the current branch. You are literally "pushing" your changes onto the remote. |
+| Remote | This is the version of a repository or branch that is hosted on a server. Remote versions can be connected to local clones so that changes can be synced. |
+
+**The Git button in the IDE**
+
+The git button in the dbt Cloud IDE allows you to apply the concept of version control to your project. This page provides descriptions of each git button command and what they do:
+
+| Name | Actions |
+| --- | --- |
+| Abort merge | This option allows you to cancel a merge that had conflicts. Please note that all the changes will be reset, and this operation cannot be reverted, so make sure to commit or save all your changes before you start a merge. |
+| Change branch | This option will allow you to change between branches (checkout). |
+| Commit and push | Committing is similar to saving any changes made within your repo. In the above situation, the changes being saved or committed are the initialization of the project. The required files and folders are being added. When you make changes to your code in the future, you'll need to commit them as well. This allows you to record what changes were made, when they were made, and who made them. |
+| Create new branch | This allows you to branch off of your base branch and edit your project. You'll notice after initializing your project that the "main" branch will be read-only. This is because any changes to code cannot and should not be made directly in the base branch. A new branch should be created in order to make any changes to your project. |
+| Initialize your project | This is done when first setting up your project. Initializing a project creates all required directories and files within an empty repository. Note: This option will not display if your repo isn't completely empty (i.e. includes a README file). Once you click **initialize your project**, you'll want to click **commit** to finish setting up your project. |
+| Open pull request | This allows you to open a pull request in Git for peers to review changes before merging into the base branch. |
+| Pull changes from master/main | This option is available if you are on any local branch that is behind the remote version of the base branch or the remote version of the branch that you're currently on. |
+| Pull from remote | This option is available if you're on the local base branch and changes have recently been pushed to the remote version of the branch. As such, pulling in changes from the remote repo will allow you to pull in the most recent version of the base branch. |
+| Reclone Your Repository | This allows you to reset your repository back to a fresh clone from your remote. You can use this option when you need to reclone your repo or if there are any git-related errors you're experiencing in the dbt Cloud IDE. Reclone your repository is not available in the beta launch. |
+| Refresh git state | This enables you to pull new branches from a different remote branch to your local branch with just one command. |
diff --git a/website/docs/docs/dbt-cloud/cloud-ide/the-dbt-ide.md b/website/docs/docs/dbt-cloud/cloud-ide/the-dbt-ide.md
deleted file mode 100644
index 8f27ffba06f..00000000000
--- a/website/docs/docs/dbt-cloud/cloud-ide/the-dbt-ide.md
+++ /dev/null
@@ -1,41 +0,0 @@
----
-title: "The dbt IDE"
-id: "the-dbt-ide"
----
-
-:::info Prerequisites
-
-You must have a dbt Cloud account to use the IDE. Consult the guide on [using the dbt IDE](using-the-dbt-ide). Don't have an account? You can get started for free [here](https://www.getdbt.com/signup).
-
-:::
-
-The dbt Integrated Development Environment (IDE) provides a real-time editing and execution environment for your dbt project. In the dbt IDE, you can write, run, test, and version control the code in your dbt project from your browser -- no command line use required.
-
-## Compiling and Running SQL
-
-In the dbt IDE, you can compile dbt code into SQL and execute it against your database directly. The IDE leverages the open-source [dbt server](rpc) to intelligently recompile only the parts of your project that have changed. This brings the cycle time for dbt project development down from minutes to seconds.
-
-## Running Projects
-
-In addition to compiling and executing SQL, you can also *run* dbt projects in the dbt IDE.
-Use dbt's [rich model selection syntax](node-selection/syntax) to [run dbt commands](dbt-commands) directly in your browser.
-
-The dbt IDE updates in real-time as models, tests, seeds, and operations are run. If a model or test fails, you can dig into the logs to find and fix the issue.
-
-## Version Control
-
-Leverage git directly from the dbt IDE to version control your code from your browser. You can branch, commit, push, and pull code with a couple of clicks - no command line required.
-
-## Dark mode
-As Ben Franklin once said:
-
-> In matters of principle, stand like a rock; in matters of taste, swim with the current.
-
diff --git a/website/docs/docs/dbt-cloud/cloud-ide/the-ide-git-button.md b/website/docs/docs/dbt-cloud/cloud-ide/the-ide-git-button.md
deleted file mode 100644
index 6d2fd4a079a..00000000000
--- a/website/docs/docs/dbt-cloud/cloud-ide/the-ide-git-button.md
+++ /dev/null
@@ -1,43 +0,0 @@
----
-title: "The IDE Git Button"
----
-
-The git button in the IDE allows you to apply the concept of [version control](dbt-cloud/cloud-ide/the-dbt-ide#version-control) to your project. This page provides descriptions of each git button command and what they do.
-
-**Initialize your project**: This is done when first setting up your project. Initializing a project will create all required directories and files within an empty repository. Note: This option will not display if your repo isn't completely empty (i.e. includes a README file).
-
-Once you click "initialize your project" you'll see the following in your IDE. Click "commit" to finish setting up your project.
-
-**Commit**: Committing is similar to "saving" any changes made within your repo. In the above situation, the changes being saved or committed are the initialization of the project. The required files and folders are being added. When you make changes to your code in the future, you'll need to commit them as well. This allows you to record what changes were made, when they were made, and who made them.
-
-**Create new branch**: This allows you to branch off of your base branch and edit your project. You'll notice after initializing your project that the "master" or "main" branch will be read-only. This is because any changes/edits to code cannot and should not be made directly in the base branch. A new branch should be created in order to make any changes to your project.
-
-**Open pull request**: This allows you to open a pull request in your git provider for peers to review changes before merging into the base branch.
-
-**Pull changes from remote**: This option will be available if you're on the local base branch and changes have recently been pushed to the remote version of the branch. As such, pulling in changes from the remote repo will allow you to pull in the most recent version of the base branch.
- -**Pull changes from master/main**: This option will be available if you are on any local branch that is behind the remote version of the base branch or the remote version of the branch that you're currently on. diff --git a/website/docs/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide.md b/website/docs/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide.md index 45256e7a6e1..8e219fe1c2f 100644 --- a/website/docs/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide.md +++ b/website/docs/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide.md @@ -3,7 +3,7 @@ title: Viewing Docs in the IDE id: viewing-docs-in-the-ide --- -The dbt Cloud IDE makes it possible to view [documentation](/building-a-dbt-project/documentation) +The dbt Cloud IDE makes it possible to view [documentation](/docs/collaborate/documentation) for your dbt project while your code is still in development. With this workflow, you can inspect and verify what your project's generated documentation will look like before your changes are released to production. @@ -14,10 +14,9 @@ To generate documentation in the IDE, run the `dbt docs generate` command in the Command Bar in the IDE. This command will generate the Docs for your dbt project as it exists in development in your IDE session. - + After generating your documentation, you can click the "view docs" button to see the latest version of your documentation rendered in a new browser window. - - + diff --git a/website/docs/docs/dbt-cloud/cloud-overview.md b/website/docs/docs/dbt-cloud/cloud-overview.md index 88edff6853f..3a436e92b5a 100644 --- a/website/docs/docs/dbt-cloud/cloud-overview.md +++ b/website/docs/docs/dbt-cloud/cloud-overview.md @@ -4,45 +4,43 @@ id: "cloud-overview" --- -[dbt Cloud](https://www.getdbt.com/product/) is a hosted service that helps data analysts and engineers productionize dbt deployments. It comes equipped with turnkey support for scheduling jobs, CI/CD, serving documentation, monitoring & alerting, and an Integrated Developer Environment (IDE). +[dbt Cloud](https://www.getdbt.com/product/) is a hosted service that helps data analysts and engineers productionize dbt deployments. It comes equipped with turnkey support for scheduling jobs, CI/CD, serving documentation, monitoring & alerting, and an integrated developer environment (IDE). dbt Cloud’s generous Developer (free) plan and deep integration with dbt Core make it well suited for data teams small and large alike. -You can get started with dbt Cloud by [Signing up](https://www.getdbt.com/signup/). +You can [sign up](https://www.getdbt.com/signup/) to get started with dbt Cloud. ## Develop dbt projects -Using the [dbt IDE](the-dbt-ide), you can develop, run, and version control dbt projects on the web. - - +You can use the [dbt Cloud IDE](/docs/get-started/develop-in-the-cloud) to develop, run, and version control dbt projects on the web. ## Schedule and run dbt jobs in production Set up custom schedules to run your production dbt jobs. dbt Cloud's comprehensive scheduling interface makes it possible to schedule jobs by day of week, time of day, or a recurring interval. - + ## Democratize access to logs -dbt Cloud makes it easy to view in-progress and historical logs for your dbt runs. From Cloud's web interface, you can view and download the run logs for your dbt invocations. If you're happy ssh'ing into a cron server and running `tail -f` on a logfile, then this feature is *not* for you! +dbt Cloud makes it easy to view in-progress and historical logs for your dbt runs. 
From dbt Cloud, you can view and download the run logs for your dbt invocations. If you're happy ssh'ing into a cron server and running `tail -f` on a logfile, then this feature is *not* for you!

## Generate and distribute documentation

-dbt Cloud hosts and authorizes access to dbt project documentation. After enabling documentation for a given job, you can click the "View Documentation" button to see the latest documentation for that job. Because these docs are generated on a schedule, they're always up to date! Simply invite your coworkers to dbt Cloud to share your project's documentation with the rest of your team. More info about enabling docs for your jobs can be found [here](cloud-generating-documentation).
+dbt Cloud hosts and authorizes access to dbt project documentation. After enabling documentation for a given job, you can click the "View Documentation" button to see the latest documentation for that job. Because these docs are generated on a schedule, they're always up to date! Simply invite your coworkers to dbt Cloud to share your project's documentation with the rest of your team. More info about enabling docs for your jobs can be found [here](/docs/collaborate/cloud-build-and-view-your-docs).

-## Enable Continuous Integration 
+## Enable Continuous Integration

:::info Available on the Basic Tier
-Continuous Integration functionality is available to accounts on the Basic Tier or higher.
+Continuous integration functionality is available to accounts on the Basic Tier or higher.
:::

-dbt Cloud can be configured to run your dbt projects in a temporary schema when new commits are pushed to open Pull Requests. When the Cloud job completes, a status will be shown for the PR inside of GitHub. This build-on-PR functionality is a great way to catch bugs before deploying to production, and an essential tool in any analysts belt. More info on enabling CI workflows in dbt Cloud can be found [here](cloud-enabling-continuous-integration).
+dbt Cloud can be configured to run your dbt projects in a temporary schema when new commits are pushed to open pull requests. When the Cloud job completes, a status will be shown for the PR inside of GitHub. This build-on-PR functionality is a great way to catch bugs before deploying to production, and an essential tool in any analyst's belt. More info on enabling CI workflows in dbt Cloud can be found [here](/docs/deploy/cloud-ci-job).
diff --git a/website/docs/docs/dbt-cloud/cloud-quickstart.md b/website/docs/docs/dbt-cloud/cloud-quickstart.md
deleted file mode 100644
index b756c8f5d59..00000000000
--- a/website/docs/docs/dbt-cloud/cloud-quickstart.md
+++ /dev/null
@@ -1,59 +0,0 @@
----
-title: "dbt Cloud Quickstart"
-id: "cloud-quickstart"
----
-
-:::info dbt Cloud
-
-The functionality documented here is available in dbt Cloud. Don't have an account? You can get started for free [here](https://www.getdbt.com/signup).
-
-:::
-
-# Set Up A Project
-When creating a new dbt Cloud account, you will be prompted to move through the Project setup flow. A project is made up of a Connection and Repository and can contain multiple Environments within it.
-
-Let's get started!
-
-## Create a connection
-
-dbt Cloud uses this connection to _connect_ to your database when running jobs and transformation queries. Depending on the type of data warehouse you're using, you'll need to supply [different configuration parameters](docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database).
-dbt Cloud natively supports connections to Snowflake, BigQuery, Redshift, Apache Spark, Databricks, and Postgres.
-
-After picking a data warehouse type, a form will be generated where you can populate your warehouse's credentials. These credentials are encrypted at rest, and dbt Cloud never stores credentials in plaintext.
-
-## Connect a repository
-
-dbt Cloud plugs directly into your version control system (GitHub, GitLab, BitBucket, etc.) to pull the latest version of your dbt project.
-
-If you've [installed the dbt Cloud application in your GitHub account](cloud-installing-the-github-application), then you will be able to select a repo from your GitHub org using this interface. If you're not using GitHub, or if you haven't installed the integration yet, you can optionally connect a git repository by [providing a git URL](cloud-import-a-project-by-git-url) from the "Git URL" tab.
-
-You'll find the "Managed" tab useful if you do not already have a git repository for your dbt project. With this option, you can let [dbt Cloud manage a repository](cloud-using-a-managed-repository) for you.
-
-## Create an environment
-
-Environments specify the information required to run dbt for your project. New dbt Cloud accounts will automatically be created with a Development environment during setup. For more information about configuring this Development environment, see [creating a development environment](using-the-dbt-ide#creating-a-development-environment).
-
-Scheduled dbt jobs can be configured in Deployment environments. These deployment environments must be configured with deployment credentials. Unlike Development credentials, which run with the permissions of a human being, deployment credentials should be configured with a service account database user. We recommend configuring deployment environments to run as a user named dbt_cloud (or similar) in your database.
-
-To create an Environment, click the hamburger button in the upper left-hand corner of the Cloud UI and select Environments from the menu. You'll then click the "New Environment" button where you can specify the dbt version and custom branch (if applicable) that dbt Cloud should use to build your project. Additionally, the deployment credentials for the Environment can be configured on this page.
-
-## Create a new job
-
-Now that dbt Cloud is able to clone your dbt repo and connect to your warehouse, you're ready to create a new job! To create a job, navigate to the Jobs page from the left sidebar, then click the "New Job" button. In the job creation interface, you can specify the environment that the job should use, as well as commands and configuration for your new job.
-
-Job schedules can be configured from the job creation interface. You can either schedule your job using the visual editor, or you can enter a custom cron syntax for your job (see the sketch at the end of this page).
-
-## Alternatives
-
-If you are interested in trialing dbt Cloud with Snowflake, you can use [Snowflake Partner Connect](https://docs.snowflake.com/en/user-guide/ecosystem-partner-connect.html) to spin up a dbt Cloud account with all of the key objects created (deployment and development environments, git repository, and sample job). All you need is a Snowflake Account with access to the ACCOUNTADMIN role to go into Partner Connect and find the dbt tile to set up a dedicated environment for test driving.
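To make the cron option mentioned under "Create a new job" concrete: a schedule reduces to a single cron expression. The `schedule:` wrapper below is an illustrative sketch, not an exact dbt Cloud payload; only the cron string itself follows standard cron syntax:

```yml
# Sketch: "0 7 * * 1-5" = 07:00 every weekday
# (fields: minute, hour, day of month, month, day of week)
schedule:
  cron: "0 7 * * 1-5"
```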
diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-api/cloud-apis.md b/website/docs/docs/dbt-cloud/dbt-cloud-api/cloud-apis.md deleted file mode 100644 index 94bf2cb67d2..00000000000 --- a/website/docs/docs/dbt-cloud/dbt-cloud-api/cloud-apis.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -title: "APIs Overview" -id: "cloud-apis" ---- - -## Overview - -Accounts on the _Team_ and _Enterprise_ plans can query the dbt Cloud APIs. - -dbt Cloud provides two APIs. - -The [dbt Cloud Administrative API](/docs/dbt-cloud/dbt-cloud-api/admin-cloud-api) can be used to administrate a dbt Cloud account. - -The [dbt Metadata API](/dbt-cloud/dbt-cloud-api/metadata/metadata-overview) can be used to fetch metadata related to the state and health of your dbt project. - -## How to Access the APIs - -dbt Cloud supports two types of API Tokens: [user tokens](/dbt-cloud/dbt-cloud-api/user-tokens) and [service account tokens](/dbt-cloud/dbt-cloud-api/service-tokens). Requests to the dbt Cloud APIs can be authorized using these tokens. diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise.md b/website/docs/docs/dbt-cloud/dbt-cloud-enterprise.md deleted file mode 100644 index e545662c2d2..00000000000 --- a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -title: "dbt Cloud Enterprise" -id: "dbt-cloud-enterprise" ---- - -:::info dbt Cloud Enterprise -This section describes features of the dbt Cloud Enterprise plan. If you’re interested in learning more about an Enterprise plan, contact sales@getdbt.com. -::: - -### SSO -- [Setting up SSO with SAML 2.0](setting-up-sso-with-saml-2.0) -- [Setting up SSO with Azure Active Directory](setting-up-enterprise-sso-with-azure-active-directory) -- [Setting up SSO with Google GSuite](setting-up-sso-with-google-gsuite) - -### Authentication -- [Setting up Snowflake OAuth](setting-up-enterprise-snowflake-oauth) - -### Permissions -- [Access Control](/docs/dbt-cloud/access-control/access-control-overview) -- [Enterprise Permissions](/docs/dbt-cloud/access-control/enterprise-permissions) - -### Audit log -- [Using the Audit log](/audit-log) \ No newline at end of file diff --git a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-saml-2.0.md b/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-saml-2.0.md deleted file mode 100644 index db31d58a96b..00000000000 --- a/website/docs/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-saml-2.0.md +++ /dev/null @@ -1,264 +0,0 @@ ---- -title: "Setting up SSO with SAML 2.0" -id: "setting-up-sso-with-saml-2.0" ---- - -:::info Enterprise Feature - -This guide describes a feature of the dbt Cloud Enterprise plan. If you’re interested in learning -more about an Enterprise plan, contact us at sales@getdbt.com. - -::: - -dbt Cloud Enterprise supports single-sign on (SSO) for any SAML 2.0-compliant identity provider (IdP). -Currently supported features include: -* IdP-initiated SSO -* SP-initiated SSO -* Just-in-time provisioning - -This document details the steps to integrate dbt Cloud with an identity -provider in order to configure Single Sign On and [role-based access control](access-control-overview#role-based-access-control). - -## Generic SAML 2.0 integrations - -_Use this section if you are configuring an identity provider besides Okta._ - -### Configuration in your identity provider - -_Note: You'll need administrator access to your SAML 2.0 compliant identity provider to follow this guide. 
-This approach will work with any SAML 2.0 compliant identity provider._ - -### Creating the application - -First, log into your SAML 2.0 identity provider and create a new application. -When promoted, configure the application with the following details: - - **Platform:** Web - - **Sign on method:** SAML 2.0 - - **App name:** dbt Cloud - - **App logo (optional):** You can optionally [download the dbt logo](https://drive.google.com/file/d/1fnsWHRu2a_UkJBJgkZtqt99x5bSyf3Aw/view?usp=sharing), and use as the logo for this app. - -#### Configuring the application - -:::info - -This guide assumes that your dbt Cloud instance is running at -https://cloud.getdbt.com. If your deployment is running at a different url, then -substitute cloud.getdbt.com for the url of your instance. - -::: - -To complete this section, you will need to create a login slug. This slug controls the URL where users on your account -can log into your application. Login slugs are typically the lowercased name of your organization -separated with dashes. For example, the login slug for _dbt Labs_ would be _dbt-labs_. -Login slugs must be unique across all dbt Cloud accounts, so pick a slug that uniquely identifies your company. - -When prompted for the SAML 2.0 application configurations, supply the following -values: - -- Single sign on URL: `https://cloud.getdbt.com/complete/saml` -- Audience URI (SP Entity ID): `https://cloud.getdbt.com/` -- Relay State: `` - -Additionally, you may configure the IdP attributes passed from your identity -provider into dbt Cloud. We recommend using the following values: - - -| name | name format | value | description | -| ---- | ----------- | ----- | ----------- | -| email | Unspecified | ${user.email} | The user's email address | -| first_name | Unspecified | ${user.first_name} | The user's first name | -| last_name | Unspecified | ${user.last_name} | The user's last name | - -dbt Cloud's [role-based access control](access-control-overview#role-based-access-control) relies -on group mappings from the IdP to assign dbt Cloud users to dbt Cloud groups. To -use role-based access control in dbt Cloud, also configure your identity -provider to provide group membership information in user attribute called -`groups`: - - -| name | name format | value | description | -| ---- | ----------- | ----- | ----------- | -| groups | Unspecified | `` | The groups a user belongs to in the IdP | - -Note: You may use a restricted group attribute statement to limit the groups set -to dbt Cloud for each authenticated user. For example, if all of your dbt Cloud groups start -with `DBT_CLOUD_...`, you may optionally apply a filter like `Starts With: DBT_CLOUD_`. -Please contact support if you have any questions. - -### Collect integration secrets - -After confirming your details, the IdP should show you the following values for -the new SAML 2.0 integration. Keep these values somewhere safe, as you will need -them to complete setup in dbt Cloud. - -- Identity Provider Issuer -- Identity Provider SSO Url -- X.509 Certificate - -### Finish setup - -After creating the Okta application, follow the instructions in the [dbt Cloud Setup](#dbt-cloud-setup) -section to complete the integration. - -## Okta integration - -_Use this section if you are configuring Okta as your identity provider_. - -First, log into your Okta account. Using the Admin dashboard, create a new app. 
- - - -On the following screen, select the following configurations: -- **Platform**: Web -- **Sign on method**: SAML 2.0 - -Click **Create** to continue the setup process. - - - -### Configure the Okta application - -On the **General Settings** page, enter the following details:: - -* **App name**: dbt Cloud -* **App logo** (optional): You can optionally [download the dbt logo](https://drive.google.com/file/d/1fnsWHRu2a_UkJBJgkZtqt99x5bSyf3Aw/view?usp=sharing), - and upload it to Okta to use as the logo for this app. - -Click **Next** to continue. - - - -### Configure SAML Settings - -On the **SAML Settings** page, enter the following values: - -* **Single sign on URL**: `https://cloud.getdbt.com/complete/okta` -* **Audience URI (SP Entity ID)**: `https://cloud.getdbt.com/` -* **Relay State**: `` - - - -Use the **Attribute Statements** and **Group Attribute Statements** forms to -map your organization's Okta User and Group Attributes to the format that -dbt Cloud expects. - -Expected **User Attribute Statements**: - -| Name | Name format | Value | Description | -| -------------- | ----------- | -------------------- | -------------------------- | -| `email` | Unspecified | `${user.email}` | _The user's email address_ | -| `first_name` | Unspecified | `${user.firstName}` | _The user's first name_ | -| `last_name` | Unspecified | `${user.lastName}` | _The user's last name_ | - - -Expected **Group Attribute Statements**: - -| Name | Name format | Filter | Value | Description | -| -------- | ----------- | ------------- | ----- | ------------------------------------- | -| `groups` | Unspecified | Matches regex | `.*` | _The groups that the user belongs to_ | - - -**Note:** You may use a more restrictive Group Attribute Statement than the -example shown above. For example, if all of your dbt Cloud groups start with -`DBT_CLOUD_`, you may use a filter like `Starts With: DBT_CLOUD_`. **Okta -only returns 100 groups for each user, so if your users belong to more than 100 -IdP groups, you will need to use a more restrictive filter**. Please contact -support if you have any questions. - - - -Click **Next** to continue. - -### Finish Okta setup - -Select *I'm an Okta customer adding an internal app*, and select *This is an -internal app that we have created*. Click **Finish** to finish setting up the -app. - - - -### View setup instructions - -On the next page, click **View Setup Instructions**. In the steps below, -you'll supply these values in your dbt Cloud Account Settings to complete -the integration between Okta and dbt Cloud. - - - - - -### Finish setup - -After creating the Okta application, follow the instructions in the [dbt Cloud Setup](#dbt-cloud-setup) -section to complete the integration. - - -## dbt Cloud Setup - -### Providing IdP values - - -To complete setup, follow the steps below in dbt Cloud. First, navigate to the -**Enterprise > Single Sign On** page under Account Settings. Next, click -the **Edit** button and supply the following SSO details: - -| Field | Value | -| ----- | ----- | -| Log in with | SAML 2.0 | -| Identity Provider SSO Url | Paste the **Identity Provider Single Sign-On URL** shown in the IdP setup instructions | -| Identity Provider Issuer | Paste the **Identity Provider Issuer** shown in the IdP setup instructions | -| X.509 Certificate | Paste the **X.509 Certificate** shown in the IdP setup instructions | -| Slug | Enter your desired login slug. | - - - -Click **Save** to complete setup for the SAML 2.0 integration. 
- -### Test the integration -After setup is complete, you can navigate to the URL generated for your account's _slug_ to -test logging in with your identity provider. Additionally, users added the the SAML 2.0 app -will be able to log in to dbt Cloud from the IdP directly. - -Users in your IdP will now be able to log into the application by navigating to the URL: - -`https://cloud.getdbt.com/enterprise-login/` - -### Setting up RBAC -After configuring an identity provider, you will be able to set up [role-based -access control](/access-control/enterprise-permissions) for your account. diff --git a/website/docs/docs/dbt-cloud/deployments/deployment-overview.md b/website/docs/docs/dbt-cloud/deployments/deployment-overview.md deleted file mode 100644 index 751198b6e29..00000000000 --- a/website/docs/docs/dbt-cloud/deployments/deployment-overview.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -id: deployment-overview -title: Overview ---- - -:::info Deployments - -This section provides an overview of the deployment types available for dbt Cloud. -If you’re interested in learning more about an Enterprise plan, contact us at sales@getdbt.com. - -::: - -In general, the dbt Cloud application deployment models fall into two categories: **Multi Tenant** and **Single Tenant**. These deployments are hosted on infrastructure managed by dbt Labs. Both models leverage AWS infrastructure as described in the [Architecture](deployment-architecture) section. - -For more information on these deployment models view the below pages. - -- [Multi Tenant](multi-tenant-deployment) -- [Single Tenant](single-tenant-deployment) diff --git a/website/docs/docs/dbt-cloud/january-2020-pricing-updates.md b/website/docs/docs/dbt-cloud/january-2020-pricing-updates.md deleted file mode 100644 index a548d8b2a3d..00000000000 --- a/website/docs/docs/dbt-cloud/january-2020-pricing-updates.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -title: "January 2020 - Pricing changes" -id: "january-2020-pricing-updates" ---- - -On January 14th, 2020, existing self-service dbt Cloud accounts will be migrated to one of the new pricing plans shown below. dbt Cloud account owners should receive emails containing more information about how this migration impacts their account. If you have not received such an email, please contact support at support@getdbt.com. - -# New Plans - -dbt Cloud will support two self-service plans: **Developer** and **Team**. Enterprise clients are not impacted by this change. - -## Pricing changes - -Please be aware of the following changes to dbt Cloud: - - Accounts will no longer be charged for Users designated as "read only" - - Accounts will no longer be charged for configured run slots - - Standard users will be automatically migrated to Developers on January 14th, 2020. The license types available in dbt Cloud will be **Read Only** and **Developer** after January 14th. - - You may reconfigure existing Standard users on your account to be Read Only in advance of January 14. - - Please contact support at support@getdbt.com if you have any questions about how these pricing changes affect your account. - -## The Developer Plan - -The Developer plan provides a best-in-class analytics engineering workflow for data teams of one. 
The Developer plan includes: - - 1 Developer license - - 1 Run Slot - - [The dbt IDE](the-dbt-ide) - - Unlimited runs per day (previously Paid-only) - - [PR builds](cloud-enabling-continuous-integration-with-github) (previously Paid-only) - -Price: **Free** - -## The Team Plan - -The Team plan is built for teams collaborating on the analytics engineering workflow. The Team plan includes: -- Everything in the Developer Plan, plus -- 5 run slots -- 50 read only seats -- API Access - -Price: **$50/developer/month** diff --git a/website/docs/docs/dbt-cloud/on-premises/dependencies.md b/website/docs/docs/dbt-cloud/on-premises/dependencies.md index de07556fb06..6278612476b 100644 --- a/website/docs/docs/dbt-cloud/on-premises/dependencies.md +++ b/website/docs/docs/dbt-cloud/on-premises/dependencies.md @@ -33,7 +33,7 @@ During initial installation, the KOTS appliance can be directly downloaded from - `quay.io`: Some dependencies of Replicated are hosted as public images in the Quay.io registry. - `hub.docker.com`: Some dependencies of Replicated are hosted as public images in Docker Hub. -Replicated maintains a list of Replicated-owned IPs for IP access restriction purposes at https://github.com/replicatedhq/ips/blob/master/ip_addresses.json. +Replicated maintains a list of Replicated-owned IPs for IP access restriction purposes at https://github.com/replicatedhq/ips/blob/main/ip_addresses.json. #### dbt Cloud Appliance Installation and Upgrades @@ -61,7 +61,7 @@ To install the dbt Cloud appliance or perform updates, some external connections ``` -Replicated maintains a list of Replicated-owned IPs for IP access restriction purposes at https://github.com/replicatedhq/ips/blob/master/ip_addresses.json. +Replicated maintains a list of Replicated-owned IPs for IP access restriction purposes at https://github.com/replicatedhq/ips/blob/main/ip_addresses.json. #### Ongoing Access @@ -98,6 +98,6 @@ The required inbound ports are: For additional information related to inbound traffic view the following sections. 
-- [Application Data Flows](/docs/dbt-cloud/deployments/deployment-architecture#application-data-flows) -- [Data Warehouse Interaction](/docs/dbt-cloud/deployments/deployment-architecture#data-warehouse-interaction) -- [Customer Managed Network Architecture](/docs/dbt-cloud/deployments/deployment-architecture#customer-managed-general-network-architecture) +- [Application Data Flows](/docs/deploy/architecture#application-data-flows) +- [Data Warehouse Interaction](/docs/deploy/architecture#data-warehouse-interaction) +- [Customer Managed Network Architecture](/docs/deploy/architecture) diff --git a/website/docs/docs/dbt-cloud/on-premises/installation.md b/website/docs/docs/dbt-cloud/on-premises/installation.md index b8fbc58b2ed..9ba856b27f4 100644 --- a/website/docs/docs/dbt-cloud/on-premises/installation.md +++ b/website/docs/docs/dbt-cloud/on-premises/installation.md @@ -3,9 +3,9 @@ id: installation title: On-Premises Installation (dbt Cloud) --- -:::note +:::note 📌 -We longer support new on-premises deployments, and instead have moved to a [Single Tenant](single-tenant) model hosted in the cloud +We no longer support new on-premises deployments, and instead have moved to a [Single Tenant](/docs/deploy/single-tenant) model hosted in the cloud. ::: diff --git a/website/docs/docs/dbt-cloud/on-premises/setup.md b/website/docs/docs/dbt-cloud/on-premises/setup.md index 1e0d9bf0516..654722192ef 100644 --- a/website/docs/docs/dbt-cloud/on-premises/setup.md +++ b/website/docs/docs/dbt-cloud/on-premises/setup.md @@ -79,13 +79,13 @@ After logging in, you can create a new account and invite members of your team, Accounts and Users are separate constructs that can have a many-to-many relationship. When creating a new Account, you can add either existing or new users to the account. If one user has access to multiple accounts, they will be able to switch accounts from the dbt Cloud frontend upon login. -Each user can have a specific role on each account. For more information on each role, please see the docs on [managing permissions](access-control-overview) +Each user can have a specific role on each account. For more information on each role, please see the docs on [managing permissions](/docs/collaborate/manage-access/about-access). ### Deploying Application Updates A new version of dbt Cloud will appear on the Version History page in your Configuration Console anytime any of the following happen: -- A new version of the dbt Cloud code is released. This typically happens every two weeks, and each new version will be accompanied by a [changelog](/docs/dbt-cloud/cloud-changelog). +- A new version of the dbt Cloud code is released. This typically happens every two weeks, and each new version will be accompanied by a [changelog](/docs/dbt-versions/dbt-cloud-release-notes). - Any configuration change is applied to your application via the Configuration Console. - Anytime an edit is applied to your Kubernetes configs via the overlays mechanism built into kots. @@ -124,4 +124,4 @@ kubectl patch deployment app -p \ "{\"spec\":{\"template\":{\"metadata\":{\"annotations\":{\"date\":\"`date +'%s'`\"}}}}}" ``` -After these steps, you are ready to manage your Github integration. Your users can log into dbt Cloud, and navigate to Profile > Integrations to start connecting your Github account to dbt Cloud. See [Github](cloud-installing-the-github-application) for more details on how your users can start using the integration. +After these steps, you are ready to manage your GitHub integration.
Your users can log into dbt Cloud, and navigate to Profile > Integrations to start connecting your GitHub account to dbt Cloud. See [GitHub](/docs/collaborate/git/connect-github) for more details on how your users can start using the integration. diff --git a/website/docs/docs/dbt-cloud/on-premises/usage-statistics.md b/website/docs/docs/dbt-cloud/on-premises/usage-statistics.md index ad143e48b6e..aa7759aa4b1 100644 --- a/website/docs/docs/dbt-cloud/on-premises/usage-statistics.md +++ b/website/docs/docs/dbt-cloud/on-premises/usage-statistics.md @@ -21,7 +21,7 @@ Usage statistics are tracked once weekly, and include the following information: - The number of developer and read only licenses utilized in each account - The version of dbt Cloud installed in the on-premises environment -This information is sent as a JSON payload to usage.getdbt.com. A typical +This information is sent as a payload to usage.getdbt.com. A typical payload looks like: ```json diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud.md b/website/docs/docs/dbt-cloud/using-dbt-cloud.md deleted file mode 100644 index c0779a5e3f7..00000000000 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud.md +++ /dev/null @@ -1,13 +0,0 @@ ---- -title: "Using dbt Cloud" -id: "using-dbt-cloud" ---- - -The following sections will help you set configure resources in your Cloud account: - -- [Enabling Continuous Integration](cloud-enabling-continuous-integration-with-github) -- [Generating Documentation](cloud-generating-documentation) -- [Snapshotting source freshness](cloud-snapshotting-source-freshness) -- [Building and Configuring Artifacts](artifacts) -- [Using a custom cron schedule](cloud-using-a-custom-cron-schedule) -- [Setting a custom target name](cloud-setting-a-custom-target-name) \ No newline at end of file diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/artifacts.md b/website/docs/docs/dbt-cloud/using-dbt-cloud/artifacts.md index 65133b96e11..40d5a2560ff 100644 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/artifacts.md +++ b/website/docs/docs/dbt-cloud/using-dbt-cloud/artifacts.md @@ -4,24 +4,28 @@ id: "artifacts" description: "Use artifacts to power your automated docs site and source freshness data." --- -## dbt Cloud Artifacts +When running dbt jobs, dbt Cloud generates and saves *artifacts*. You can use these artifacts, like `manifest.json`, `catalog.json`, and `sources.json` to power different aspects of dbt Cloud, namely: [dbt Docs](documentation) and [source freshness reporting](/docs/build/sources#snapshotting-source-data-freshness). -When dbt Cloud runs dbt jobs, it generates and saves *artifacts*. These artifacts, like `manifest.json`, `catalog.json`, and `sources.json` are used to power different aspects of dbt Cloud, namely: [dbt Docs](documentation) and [source freshness reporting](cloud-snapshotting-source-freshness). +## Create dbt Cloud Artifacts -While every dbt Cloud job will produce artifacts, typically there is only one production job for a given project. If you select a production job on the Project Settings page, dbt Cloud will render links to the production Documentation and Source Freshness artifacts generated for that project in the nav sidebar. +While running any job can produce artifacts, you should only associate one production job with a given project to produce the project's artifacts. You can designate this connection in the **Project details** page. 
To access this page, click the gear icon in the upper right, select **Account Settings**, select your project, and click **Edit** in the lower right. Under **Artifacts**, select the jobs you want to produce documentation and source freshness artifacts for. - + -### Documentation +If you don't see your job listed, you might need to edit the job and select **Run source freshness** and **Generate docs on run**. + + -Once a job has been selected from the Documentation drop-down menu in your project settings, dbt Cloud's left-hand navbar will update to include a link to documentation for this job. This link will always point to the latest version of the documentation for your account! +When you add a production job to a project, dbt Cloud updates the content and provides links to the production documentation and source freshness artifacts it generated for that project. You can see these links by clicking **Deploy** in the upper left, selecting **Jobs**, and then selecting the production job. From the job page, you can select a specific run to see how artifacts were updated for that run only. +### Documentation +When set up, dbt Cloud updates the **Documentation** link in the upper left so it links to documentation for this job. This link always points to the latest version of the documentation for your account! ### Source Freshness -As with Documentation, configuring a job for the Source Freshness artifact setting will update dbt Cloud's left-hand navbar to include a link to the latest Source Freshness report for the selected job. +As with Documentation, configuring a job for the Source Freshness artifact setting also updates the Data Sources link under **Deploy**. The new link points to the latest Source Freshness report for the selected job. diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github.md b/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github.md deleted file mode 100644 index e2afc0ca0db..00000000000 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github.md +++ /dev/null @@ -1,121 +0,0 @@ ---- -title: "Enabling CI" -id: "cloud-enabling-continuous-integration" -description: "You can enable CI to test every single change prior to deploying the code to production just like in a software development workflow." ---- - -## Overview - -dbt Cloud makes it easy to test every single change you make prior to deploying that code into production. Once you've [connected your GitHub account](cloud-installing-the-github-application) or [your GitLab account](connecting-gitlab), you can configure jobs to run when new Pull Requests (referred to as Merge Requests in GitLab) are opened against your dbt repo. When these jobs complete, their statuses will be shown directly in the Pull Request. This makes it possible to deploy new code to production with confidence. - -:::info Draft Pull Requests - -Jobs will _not_ be triggered by draft Pull Requests. If you would like jobs to run on each new commit, please mark your Pull Request as `Ready for review`. - -::: - -:::info GitLab Webhooks Compatability and Availability - -If you previously configured your dbt project by providing a GitLab git URL, you need to reconfigure the repository to connect through [native GitLab authentication](connecting-gitlab), as we cannot enable webhooks for your project through SSH. - -GitLab Webhooks are available to only GitLab users who have a paid or self-hosted GitLab account. 
- -::: - -GitHub Example: - - -GitLab Example: - - -## Enabling Continuous Integration (CI) - -To enable runs on Pull Requests, navigate to the Job Settings page for the relevant job. In the "Triggers" section of the page, switch to the "Webhooks" tab, and then click the checkbox next to `RUN ON PULL REQUESTS?` as shown below. - - - - -## Understanding CI in dbt Cloud - -When Pull Request builds are enabled, dbt Cloud will listen for webhooks from GitHub or GitLab indicating that a new PR has been opened or updated with new commits. When one of these webhooks is received, dbt Cloud will enqueue a new run of the specified job. Crucially, this run will be configured to build into a special, [temporary schema](building-models/using-custom-schemas) using the prefix `dbt_cloud`. The name of these temporary schemas will be unique for each PR, and is shown in the Run Details page for the given run. This temporary schema will allow you to inspect the relations built by dbt Cloud, directly in your data warehouse. - - - -When the run is complete, dbt Cloud will update the PR in GitHub or MR in GitLab with a status message indicating the results of the run. Once the PR is closed or merged, dbt Cloud will delete the temporary schema from your data warehouse. - -## Slim CI - -With Slim CI, you don't have to rebuild and test all your models. You can instruct dbt Cloud to run jobs on only modified or new resources. - -When creating or editing a job in dbt Cloud, you can set your execution settings to defer to a previous run state. Use the drop menu to select which **production** job you want to defer to. - - - -When a job is selected, dbt Cloud will surface the artifacts from that job's most recent successful run. dbt will then use those artifacts to determine the set of new and modified resources. In your job commands, you can signal to dbt to run only on these modified resources and their children by including the `state:modified+` argument. - -As example: - -``` -dbt seed --select state:modified+ -dbt run --select state:modified+ -dbt test --select state:modified+ -``` - -Because dbt Cloud manages deferral and state environment variables, there is no need to specify `--defer` or `--state` flags. **Note:** Both jobs need to be running dbt v0.18.0 or newer. - - -To learn more about state comparison and deferral in dbt, read the docs on [state](understanding-state). - -## Fresh Rebuilds - -As an extension of the Slim CI feature, dbt Cloud can rerun and retest only the things that are fresher compared to a previous run. - - - -Only supported by v1.1 or newer. - - - - - -Only supported by v1.1 or newer. - -:::caution Experimental functionality -The `source_status` selection is experimental and subject to change. During this time, ongoing improvements may limit this feature’s availability and cause breaking changes to its functionality. -::: - -When a job is selected, dbt Cloud will surface the artifacts from that job's most recent successful run. dbt will then use those artifacts to determine the set of fresh sources. In your job commands, you can signal to dbt to run and test only on these fresher sources and their children by including the `source_status:fresher+` argument. This requires both previous and current state to have the `sources.json` artifact be available. Or plainly said, both job states need to run `dbt source freshness`. 
- -As example: -```bash -# Command step order -dbt source freshness -dbt build --select source_status:fresher+ -``` - - -More example commands in [Pro-tips for workflows](/docs/guides/best-practices.md#pro-tips-for-workflows). - -## Troubleshooting - -### Reconnecting your dbt project to use a native GitHub or GitLab repo - -If your dbt project relies on a Git URL and SSH to connect to your GitHub or GitLab repo, you need to disconnect your repo and reconnect it with native Github or GitLab auth in order to enable webhooks. - -First, make sure you have [native GitHub authentication](cloud-installing-the-github-application) or [native GitLab authentication](connecting-gitlab) set up. After you have gone through those steps, head to `Account Settings` -> `Projects` and click on the project you'd like to reconnect through native GitHub or GitLab auth. Then click on the repository link. - -Once you're in the repository page, you can click the `Edit` button and then the `Disconnect Repository` button at the bottom. - - -Confirm that you'd like to disconnect your repository. You should then see a new `Configure a repository` link in your old repository's place. Click through to the configuration page, which will look like this: - - - -Select the `GitHub` or `GitLab` tab and reselect your repository. That should complete the setup and enable you to use webhooks in your jobs configuration. - -### Error messages that refer to schemas from previous PRs - -If you receive a schema-related error message referencing a *previous* PR, this is usually an indicator that you are not using a production job for your deferral and are instead using *self*. If the prior PR has already been merged, the prior PR's schema may have been dropped by the time the Slim CI job for the current PR is kicked off. - -To fix this issue, select a production job run to defer to instead of self. diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation.md b/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation.md deleted file mode 100644 index eece7fc2208..00000000000 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation.md +++ /dev/null @@ -1,21 +0,0 @@ ---- -title: "Generating documentation" -id: "cloud-generating-documentation" -description: "Automatically generate project documentation as you run jobs." ---- - -## Enabling documentation - -Documentation can be enabled for a job in dbt Cloud by checking the "Generate docs?" checkbox on the Job Settings page, as shown below. Once this setting is enabled, subsequent runs of the job will automatically include a step to generate documentation. - - - -## Viewing documentation - -To view the latest documentation for a job, click the "View Latest Documentation" button from the Job Details page. This link will always navigate you to the most recent version of your project's documentation in dbt Cloud. - - - -## Easy access - -See [Building and Configuring Artifacts](artifacts) for more information on exposing links to the latest Documentation and Source Freshness reports to your team. \ No newline at end of file diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-metrics-layer.md b/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-metrics-layer.md deleted file mode 100644 index ca4d4e2f007..00000000000 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-metrics-layer.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -title: "What is the dbt metrics layer?" 
-id: "cloud-metrics-layer" -description: "The dbt metrics layer helps you standardize metrics within your organization." -keywords: - - dbt metrics layer ---- - -[dbt metrics](/docs/building-a-dbt-project/metrics) ensure metric consistency and provide a way to standardize metrics under version control in dbt projects. By abstracting metrics calculations out of pre-aggregated tables or specific business intelligence tools (BI tools), dbt metrics can be defined once and used everywhere. Defining metrics in one place ensures consistent reporting of key business metrics especially in an environment where metric definitions and dimensions are changing along with your business. - -The data models that power these metrics already exist in your dbt project. You can use dbt metrics in different ways: - -* High-level view of your most important key performance indicators (KPIs), such as weekly active users, revenue, or time-on-site. -* Drilled-down view of the core entities that comprise those metrics, such as users, orders, or pageviews. - -While dbt does not currently provide a BI experience for exploring these metrics, we’re working on a number of integrations with BI partners that will help unlock the full value of the metrics layer. - -If you’re interested in taking metrics for a spin or integrating your product with dbt metrics, you can [sign up for the beta!](https://docs.google.com/forms/d/1MjVfD3rLg2hpjEbOaaocnjGtUdNY-wNpoyy1aHL_x9o/viewform) diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab.md b/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab.md index a26779b93cb..8f09d9dfb34 100644 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab.md +++ b/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab.md @@ -14,6 +14,8 @@ Accessed via the "run detail" page in dbt Cloud, the model timing dashboard disp This is a very visual way to explore your run and surface model bottlenecks. Longest running models *may* be ripe for further exploration -- which can lead to refactoring or reducing run cadence. -Note that this is currently only available on multi-tenant Team and Enterprise accounts. +Notes: +- The model timing dashboard is currently only available on multi-tenant Team and Enterprise accounts. +- The model timing dashboard can only be viewed for jobs that have successfully completed. - + diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-notifications.md b/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-notifications.md deleted file mode 100644 index 1ab34adb35d..00000000000 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-notifications.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -title: "Configuring notifications" -description: "Set up notifications in dbt Cloud to receive Email or Slack alerts for job run status." ---- - -### Overview - -Setting up notifications in dbt Cloud will allow you to receive alerts via Email or a chosen Slack channel when a job run succeeds, fails, or is cancelled. - -### Email - -There are two options for setting up email notifications. As a **user**, you can set up email notifications for yourself under your Profile. As an **admin**, you can set up notifications on your team members' behalf. - -* **As a user:** Navigate to your **Profile** (found in the top right-hand side of dbt Cloud) and select **Notifications**. -\ -Next select **Edit** and select the type of Notification (Succeeds, Fails, or Is Cancelled) for each Job for which you would like to be notified. 
- -* **As an admin:** Navigate to Account Settings (in the hamburger menu) > Notifications. -Select the User you'd like to set notifications for. Select **Edit** and select the type of Notification (Succeeds, Fails, or Is Cancelled) for each Job for which they will be notified. - -Finally press **Save**. - - - -### Slack - -**Note**: Currently, Slack notifications can only be configured by one user to one Slack channel. Additionally, you must be an admin of the Slack workspace in order to configure Slack notifications. - -In general, there are two parts to setting up Slack notifications. The first involves setting up the dbt Cloud integration with Slack, while the second involves setting up the notifications themselves. - - - If you **haven't** set up the integration: Watch the entire video below! - - - If you **have** set up the integration: Feel free to skip ahead to 0:40 in the video. - - \ No newline at end of file diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications.md b/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications.md deleted file mode 100644 index 643f5b60d49..00000000000 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -title: "Configuring Slack notifications" -description: "Report status alerts in a designated Slack channel every time you run a job." ---- - -Setting up Slack notifications in dbt Cloud will allow you to receive alerts in a chosen Slack channel when a job run succeeds, fails, or is cancelled. - -**Note**: Currently, Slack notifications can only be configured by one user to one Slack channel. Additionally, you must be an admin of the Slack workspace in order to configure Slack notifications. - -In general, there are two parts to setting up Slack notifications. The first involves setting up the dbt Cloud integration with Slack, while the second involves setting up the notifications themselves. - - - If you **haven't** set up the integration: Watch the entire video below! - - - If you **have** set up the integration: Feel free to skip ahead to 0:40 in the video. - - diff --git a/website/docs/docs/dbt-cloud/cloud-dbt-cloud-support.md b/website/docs/docs/dbt-support.md similarity index 67% rename from website/docs/docs/dbt-cloud/cloud-dbt-cloud-support.md rename to website/docs/docs/dbt-support.md index bccc6c4db56..a24b00ffb90 100644 --- a/website/docs/docs/dbt-cloud/cloud-dbt-cloud-support.md +++ b/website/docs/docs/dbt-support.md @@ -1,12 +1,16 @@ --- -title: "dbt Cloud Support" -id: "cloud-dbt-cloud-support" +title: "dbt support" +id: "dbt-support" --- -Welcome to dbt Cloud Support! +## dbt Core support -Our purpose is to assist dbt Cloud users as they work through implementing and utilizing dbt Cloud at their organizations. Have a question you can't find an answer to in [our docs](https://docs.getdbt.com/), [dbt discourse](https://discourse.getdbt.com/), or [Stack Overflow](https://stackoverflow.com/questions/tagged/dbt)? dbt Support is here to `dbt help` you! -Check out our guide on [getting help](https://docs.getdbt.com/docs/guides/getting-help) - half of the problem is often knowing where to look... and how to ask good questions! +If you're developing on the command line (CLI) and have questions or need some help — reach out to the helpful dbt community through [the Community Forum](https://discourse.getdbt.com/) or [dbt Community Slack](https://www.getdbt.com/community/join-the-community/).
+ +## dbt Cloud support + +We want to help you work through implementing and utilizing dbt Cloud at your organization. Have a question you can't find an answer to in [our docs](https://docs.getdbt.com/) or [the Community Forum](https://discourse.getdbt.com/)? dbt Support is here to `dbt help` you! +Check out our guide on [getting help](/guides/legacy/getting-help) - half of the problem is often knowing where to look... and how to ask good questions! Types of questions dbt Support will assist you with: - **How do I...** - set up a dbt Cloud project? @@ -32,7 +36,7 @@ Types of questions dbt Support will assist you with: error near line 9` -**For dbt Cloud Enterprise accounts** +### dbt Cloud Enterprise accounts Types of questions you should ask your Solutions Architect and Sales Director: - How should we think about setting up our dbt projects, environments, and jobs based on our company structure and needs? diff --git a/website/docs/docs/core-versions.md b/website/docs/docs/dbt-versions/core-versions.md similarity index 75% rename from website/docs/docs/core-versions.md rename to website/docs/docs/dbt-versions/core-versions.md index 986d356ddd2..3ba6bb75375 100644 --- a/website/docs/docs/core-versions.md +++ b/website/docs/docs/dbt-versions/core-versions.md @@ -1,22 +1,22 @@ --- title: "About dbt Core versions" -id: "core-versions" +id: "core" description: "Learn about semantic versioning for dbt Core, and how long those versions are supported." --- -dbt Core releases follow [semantic versioning](https://semver.org/). The policies and expectations on this page assume prior familiarity with semantic versions. For more on how we use semantic versions, see "[How dbt Core uses semantic versioning](#how-dbt-core-uses-semantic-versioning)." +dbt Core releases follow [semantic versioning](https://semver.org/). The policies and expectations on this page assume prior familiarity with semantic versions. For more on how we use semantic versions, see [How dbt Core uses semantic versioning](#how-dbt-core-uses-semantic-versioning). ### Further reading -- To learn how you can use dbt Core versions in dbt Cloud, see "[Choosing a dbt Core version](cloud-choosing-a-dbt-version)." -- To learn about installing dbt Core, see "[How to install dbt Core](dbt-cli/install/overview)." +- To learn how you can use dbt Core versions in dbt Cloud, see [Choosing a dbt Core version](/docs/dbt-versions/upgrade-core-in-cloud). +- To learn about installing dbt Core, see "[How to install dbt Core](/docs/get-started/installation)." - To restrict your project to only work with a range of dbt Core versions, or use the currently running dbt Core version, see [`require-dbt-version`](require-dbt-version) and [`dbt_version`](dbt_version). ## Version support prior to v1.0 - We are no longer releasing new patches for minor versions prior to v1.0. - As of June 30, 2022, dbt Cloud will remove support for dbt Core versions older than v1.0. At that point, we will also remove v0.20 and v0.21 from the version dropdown on this website. -- You can read the [specific version migration guides](/docs/guides/migration-guide) to understand changes to each version. Each migration guide will link to pages of documentation that were added or updated. Those pages of documentation will also include "Changelog" notes, which you can toggle to see notes on specific changes from each older version. +- You can read the [specific version migration guides](/guides/migration/versions) to understand changes to each version. 
Each migration guide will link to pages of documentation that were added or updated. Those pages of documentation will also include "Changelog" notes, which you can toggle to see notes on specific changes from each older version. ## Version support starting with v1.0 @@ -25,7 +25,7 @@ dbt Core releases follow [semantic versioning](https://semver.org/). The policie Minor versions include new features and capabilities. They will be supported for one year (12 months) from the date of their initial release. _This is a definite commitment._ Our mechanism for continuing to support a minor version is by releasing new patches: small, targeted bug fixes. Whenever we refer to a minor version, such as v1.0, we always mean its latest available patch release (v1.0.x). While a minor version is officially supported: -- You can use it in dbt Cloud. For more on dbt Cloud versioning, see "[Choosing a dbt version](cloud-choosing-a-dbt-version)." +- You can use it in dbt Cloud. For more on dbt Cloud versioning, see [Choosing a dbt version](/docs/dbt-versions/upgrade-core-in-cloud). - You can select it from the version dropdown on this website, to see documentation that is accurate for use with that minor version. ### Ongoing patches @@ -44,6 +44,10 @@ We aim to release a new minor "feature" every 3 months. _This is an indicative t +:::warning ⚠️ v0.X Non-Supported Period +We are giving accounts until the end of June 2022 to upgrade to dbt 1.0 or later. Pre-dbt 1.0 versions will not receive patch fixes, and our support team will no longer provide version-specific help for non-supported versions of dbt. Additionally, jobs running dbt versions prior to 1.0 will start experiencing service disruptions before the end of 2022 and will be removed from the dbt Cloud context by the end of 2022. You will receive additional email and in-app notifications before disruption to your production jobs. +::: + ## Best practices for upgrading Because of our new version practice, we've outlined best practices and expectations for dbt users to upgrade as we continue to release new versions of dbt Core. @@ -68,9 +72,13 @@ Like many software projects, dbt Core releases follow [semantic versioning](http - **Minor versions**, also called "feature" releases, include a mix of new features, behind-the-scenes improvements, and changes to existing capabilities that are **backwards compatible** with previous minor versions. They will not break code in your project that relies on documented functionality. - **Patch versions**, also called "bugfix" or "security" releases, include **fixes _only_**. These fixes could be needed to restore previous (documented) behavior, fix obvious shortcomings of new features, or offer critical fixes for security or installation issues. We are judicious about which fixes are included in patch releases, to minimize the surface area of changes. +We are committed to avoiding breaking changes in minor versions for end users of dbt. There are two types of breaking changes that may be included in minor versions: +- Changes to the [Python interface for adapter plugins](/guides/advanced/adapter-development/3-building-a-new-adapter). These changes are relevant _only_ to adapter maintainers, and they will be clearly communicated in documentation and release notes. +- Changes to metadata interfaces, including [artifacts](dbt-artifacts) and [logging](events-logging), signalled by a version bump.
Those version upgrades may require you to update external code that depends on these interfaces, or to coordinate upgrades between dbt orchestrations that share metadata, such as [state-powered selection](understanding-state). + ### How we version adapter plugins -When you use dbt, you're using `dbt-core` together with an adapter plugin specific to your database. You can see the current list in ["Available adapters"](available-adapters). Both `dbt-core` and dbt adapter plugins follow semantic versioning. +When you use dbt, you're using `dbt-core` together with an adapter plugin specific to your database. You can see the current list in [Supported Data Platforms](supported-data-platforms). Both `dbt-core` and dbt adapter plugins follow semantic versioning. `dbt-core` and adapter plugins coordinate new features and behind-the-scenes changes in minor releases. When it comes to fixing bugs, sooner is better—so patch versions are released independently for `dbt-core` and plugins. diff --git a/website/docs/docs/dbt-versions/product-lifecycles.md b/website/docs/docs/dbt-versions/product-lifecycles.md new file mode 100644 index 00000000000..f676c6af2eb --- /dev/null +++ b/website/docs/docs/dbt-versions/product-lifecycles.md @@ -0,0 +1,48 @@ +--- +title: "dbt Product lifecycles" +id: "product-lifecycles" +description: "Learn about dbt Labs' product lifecycles." +--- + +dbt Labs is directly involved with the maintenance of two products: + +- dbt Core: The [open-source](https://github.com/dbt-labs/dbt-core) software that’s freely available +- dbt Cloud: The managed [SaaS solution](https://www.getdbt.com/signup) built on top of dbt Core + +Any dbt feature will fall into one of the following lifecycle states: + + +### dbt Cloud + +- **Beta:** Beta features may be made available for the purpose of customer testing and evaluation. These may not be feature-complete or fully stable. There may still be some planned additions and modifications to product behaviors while in Beta. Breaking changes may occur – although we will do our best to communicate them in advance, we may not always be able to do so. Beta features may not be fully documented, technical support may be limited, and service level objectives (SLOs) may not be provided. + +- **Preview (Private or Public):** Preview features are stable and can be considered for production deployments. There may still be some planned additions and modifications to product behaviors before moving to General Availability. We may also introduce new functionality to Preview features that is not backward compatible. Preview features include documentation, technical support, and service level objectives (SLOs). Features in Preview are generally provided at no extra cost, although they may become paid features in their Generally Available state. + +- **Generally Available (GA):** Generally Available features are stable and can be considered for production deployments. Service level agreements (SLAs) apply to GA features, and these features include documentation and technical support. + +- **Deprecated:** Features in this state are not actively worked on or enhanced by dbt Labs and will continue to function as-is until their removal date. + +- **Removed:** Removed features no longer have any level of product functionality or platform support. + +### dbt Core + +We release dbt Core in the following lifecycle states. Core releases follow semantic versioning, which you can read more about in [About Core versions](/docs/dbt-versions/core).
+- **Unreleased:** We will include this functionality in the next minor version prerelease. However, we make no commitments about its behavior or implementation. As maintainers, we reserve the right to change any part of it, or remove it entirely (with an accompanying explanation). + +- **Prerelease:** + * **Beta:** The purpose of betas is to provide a first glimpse of the net-new features that will be arriving in this minor version, when it has its + final release. The code included in beta should work, without regression from existing functionality, or negative interactions with other released + features. Net-new features included in a beta _may be_ incomplete or have known edge cases/limitations. Changes included in beta are not “locked,” + and the maintainers reserve the right to change or remove them (with an explanation). + * **Release Candidate:** The purpose of a release candidate is to offer a 2-week window for more extensive production-level testing, with the goal of + catching regressions before they go live in a final release. Users can expect that features in a Release Candidate will work the same on release day. + However, if we do find a significant bug, we do still reserve the right to change or remove the underlying behavior, with a clear explanation. +- **Released:** Ready for use in production. + - **Experimental:** Features we release for general availability, which we believe are usable in their current form, but for which we may document + additional caveats. + - **Undocumented:** These are subsets of dbt Core functionality that are internal, not contracted, or intentionally left undocumented. Do not consider + this functionality part of that release’s product surface area. +- **Deprecated:** Features in this state are not actively worked on or enhanced by dbt Labs and will continue to function as-is until their removal date. +- **Removed:** Removed features no longer have any level of product functionality or platform support. + + diff --git a/website/docs/docs/dbt-cloud/release-notes.md b/website/docs/docs/dbt-versions/release-notes.md similarity index 87% rename from website/docs/docs/dbt-cloud/release-notes.md rename to website/docs/docs/dbt-versions/release-notes.md index 95a0d6b0a07..db25af163ae 100644 --- a/website/docs/docs/dbt-cloud/release-notes.md +++ b/website/docs/docs/dbt-versions/release-notes.md @@ -1,6 +1,6 @@ --- -title: "dbt Cloud Release Notes" -id: "release-notes" +title: "About dbt Cloud Release Notes" +id: "dbt-cloud-release-notes" description: "Release notes for dbt Cloud" --- diff --git a/website/docs/docs/dbt-versions/release-notes/04-Nov-2022/ide-features-ide-deprecation.md b/website/docs/docs/dbt-versions/release-notes/04-Nov-2022/ide-features-ide-deprecation.md new file mode 100644 index 00000000000..becad55356c --- /dev/null +++ b/website/docs/docs/dbt-versions/release-notes/04-Nov-2022/ide-features-ide-deprecation.md @@ -0,0 +1,32 @@ +--- + +title: "Extra features in new IDE, and classic IDE deprecation" +id: "ide-features-ide-deprecation" +description: "Enhancement and Deprecation: Extra features in new IDE, and classic IDE deprecation" +sidebar_label: "Enhancement and deprecation: Extra features in the new IDE and classic IDE deprecation" +tags: [Nov-29-2022, v1.1.67.0] + +--- + +### Extra features in new and refreshed IDE + +The refreshed version of the dbt Cloud IDE has launched four brand-new features, making it easier and faster for you to develop in the IDE.
+ +The new features are: + +- **Formatting** — Format your dbt SQL files to a single code style with a click of a button. This uses the tool [sqlfmt](https://github.com/tconbeer/sqlfmt). +- **Git diff view** — Highlights the changes in a file before opening a pull request. +- **dbt autocomplete** — There are four new types of autocomplete features to help you develop faster: + - Use `ref` to autocomplete your model names + - Use `source` to autocomplete your source name + table name + - Use `macro` to autocomplete your arguments + - Use `env var` to autocomplete environment variables +- **Dark mode** — Use dark mode in the dbt Cloud IDE for low-light environments. + +Read more about all the [Cloud IDE features](/docs/get-started/dbt-cloud-features). + +### Classic IDE deprecation notice + +In December 2022, dbt Labs will deprecate the classic IDE. The [new and refreshed IDE](/docs/get-started/develop-in-the-cloud) will be available for _all_ dbt Cloud users. You will no longer be able to access the classic IDE, and dbt Labs might introduce changes that break it. + +With deprecation, dbt Labs will only support the refreshed version of the dbt Cloud IDE. diff --git a/website/docs/docs/dbt-versions/release-notes/05-Oct-2022/cloud-integration-azure.md b/website/docs/docs/dbt-versions/release-notes/05-Oct-2022/cloud-integration-azure.md new file mode 100644 index 00000000000..70170b0410f --- /dev/null +++ b/website/docs/docs/dbt-versions/release-notes/05-Oct-2022/cloud-integration-azure.md @@ -0,0 +1,15 @@ +--- +title: "Announcing dbt Cloud’s native integration with Azure DevOps" +id: "cloud-integration-azure" +description: "dbt Cloud native integration with Azure DevOps" +sidebar_label: "Improvement: Native integration with Azure DevOps" +tags: [Oct-11-2022, v1.1.64] +--- + +dbt Cloud now offers a native integration with Azure DevOps for dbt Cloud customers on the Enterprise plan. We built this integration to remove friction, increase security, and unlock net-new product experiences for our customers. [Setting up the Azure DevOps integration](/docs/collaborate/git/connect-azure-devops) in dbt Cloud provides: + - easy dbt project setup, + - an improved security posture, + - repo permissions enforcement in dbt Cloud IDE, and + - dbt Cloud Slim CI. + +Check out our [live blog post](https://www.getdbt.com/blog/dbt-cloud-integration-azure-devops/) to learn more! diff --git a/website/docs/docs/dbt-versions/release-notes/05-Oct-2022/new-ide-launch.md b/website/docs/docs/dbt-versions/release-notes/05-Oct-2022/new-ide-launch.md new file mode 100644 index 00000000000..d57d051540c --- /dev/null +++ b/website/docs/docs/dbt-versions/release-notes/05-Oct-2022/new-ide-launch.md @@ -0,0 +1,21 @@ +--- +title: "Enhancement: New Cloud IDE launch" +id: "new-ide-launch" +description: "Enhancement: New Cloud IDE launch" +sidebar_label: "Snappier, faster, and new Cloud IDE" +tags: [Oct-18-2022] +--- + +## Introducing a snappier, improved, and powerful Cloud IDE + +The new version of the Cloud IDE makes it easy for you to build data models without thinking much about environment setup and configuration. + +The new Cloud IDE includes performance upgrades, ergonomics improvements, and some delightful enhancements! + +Some of the improvements include: + +- Improved Cloud IDE startup time (starting the IDE), interaction time (saving and committing), and reliability. +- Better organization and navigation with features like drag and drop of files, breadcrumbs, build button drop-down, and more.
+- New features like auto-formatting your files, auto-completing model names, and the git diff view let you review your changes before making a pull request.
+
+Read more about the new [Cloud IDE features](/docs/get-started/dbt-cloud-features) and check out the [New and improved Cloud IDE](https://www.getdbt.com/blog/new-improved-cloud-ide/) blog post for more info!
diff --git a/website/docs/docs/dbt-versions/release-notes/05-Sept-2022/liststeps-endpoint-deprecation.md b/website/docs/docs/dbt-versions/release-notes/05-Sept-2022/liststeps-endpoint-deprecation.md
new file mode 100644
index 00000000000..07eb72986f2
--- /dev/null
+++ b/website/docs/docs/dbt-versions/release-notes/05-Sept-2022/liststeps-endpoint-deprecation.md
@@ -0,0 +1,15 @@
+---
+title: "List Steps API endpoint deprecation warning"
+id: "liststeps-endpoint-deprecation"
+description: "List Steps API deprecation"
+sidebar_label: "Deprecation: List Steps API endpoint"
+tags: [Sept-15-2022]
+---
+
+On October 14th, 2022, dbt Labs is deprecating the [List Steps](https://docs.getdbt.com/dbt-cloud/api-v2#tag/Runs/operation/listSteps) API endpoint. From October 14th, any GET requests to this endpoint will fail. Please prepare to stop using the List Steps endpoint as soon as possible.
+
+dbt Labs will continue to maintain the [Get Run](https://docs.getdbt.com/dbt-cloud/api-v2#tag/Runs/operation/getRunById) endpoint, which is a viable alternative depending on the use case.
+
+You can fetch run steps for an individual run with a GET request to the following URL:
+
+`https://cloud.getdbt.com/api/v2/accounts/{accountId}/runs/{runId}/?include_related=["run_steps"]`
diff --git a/website/docs/docs/dbt-versions/release-notes/05-Sept-2022/metadata-api-data-retention-limits.md b/website/docs/docs/dbt-versions/release-notes/05-Sept-2022/metadata-api-data-retention-limits.md
new file mode 100644
index 00000000000..20a6edb1c26
--- /dev/null
+++ b/website/docs/docs/dbt-versions/release-notes/05-Sept-2022/metadata-api-data-retention-limits.md
@@ -0,0 +1,11 @@
+---
+title: "Query the previous three months of data using the metadata API"
+id: "metadata-api-data-retention-limits"
+description: "Metadata API data retention limits"
+sidebar_label: "Fix: Metadata API data retention limits"
+tags: [Sept-29-2022]
+---
+
+In order to make the metadata API more scalable and improve its latency, we’ve implemented data retention limits. The metadata API can now query data from the previous three calendar months. For example, if today is March 1, you can query data back to January 1st.
+
+For more information, see "[Metadata API](/docs/dbt-cloud-apis/metadata-api)."
diff --git a/website/docs/docs/dbt-versions/release-notes/06-Aug-2022/ide-improvement-beta.md b/website/docs/docs/dbt-versions/release-notes/06-Aug-2022/ide-improvement-beta.md
new file mode 100644
index 00000000000..9cb5827e9de
--- /dev/null
+++ b/website/docs/docs/dbt-versions/release-notes/06-Aug-2022/ide-improvement-beta.md
@@ -0,0 +1,36 @@
+---
+title: "Enhancement: New Cloud IDE beta"
+id: "ide-improvements-beta"
+description: "Adding IDE performance and reliability improvements"
+sidebar_label: "Enhancement: New Cloud IDE beta"
+tags: [Aug-16-2022]
+---
+
+:::info Beta feature
+Read more about the [Cloud IDE beta](https://www.getdbt.com/blog/staging-highlights-the-latest-from-dbt-labs/) and [submit your expression of interest](https://docs.google.com/forms/d/e/1FAIpQLSdlU65gqTZPyGAUc16SkxqTc50NO9vdq_KGx1Mjm_4FB_97FA/viewform) to join the new Cloud IDE beta group.
+
+:::
+
+Building code in the dbt Cloud integrated development environment (IDE) should be seamless, and the tool that you’re using should not add more distractions to data work that is often already confusing and difficult.
+
+We're now excited to start rolling out the IDE beta version, which focuses on performance and reliability improvements.
+
+The classic IDE currently has severe performance and reliability issues. It takes a long time to start up the IDE, and interactions (saving or committing) are slow.
+
+Our focus is on performance and reliability, particularly around the following four metrics:
+
+- Spinner time for cold start = Time that you see a spinner in a brand new session.
+- Spinner time for hot start = Time that you see a spinner when you resume an existing session (return within 3 hours).
+- Time to save = Time between saving a file and the IDE being ready for edit.
+- Time to git = Time between making a commit and the IDE being ready for edit.
+
+**Improvements:**
+
+To address these issues, we rebuilt the IDE and made some significant architectural changes to the way it works. These changes will help improve the IDE's performance and reliability:
+
+- Your IDE spinner and interaction time will be faster, regardless of the size of your dbt project.
+    - Instead of fetching and downloading all the contents for the files during any change, the IDE only needs the file tree/name. This means that starting up the IDE should no longer depend on the size of the dbt project. This also helps make interactions with the IDE (saving files and committing changes) snappier.
+
+- Your IDE spinner time will be quicker because you can access the IDE without needing to wait for the RPC server to finish getting ready.
+
+
diff --git a/website/docs/docs/dbt-versions/release-notes/06-Aug-2022/support-redshift-ra3.md b/website/docs/docs/dbt-versions/release-notes/06-Aug-2022/support-redshift-ra3.md
new file mode 100644
index 00000000000..b70783a2ed9
--- /dev/null
+++ b/website/docs/docs/dbt-versions/release-notes/06-Aug-2022/support-redshift-ra3.md
@@ -0,0 +1,18 @@
+---
+title: "Enhancement: Support for cross-database sources on Redshift RA3 instances"
+id: "support-redshift-ra3"
+description: "Adding support for cross-database queries for RA3"
+sidebar_label: "Enhancement: Support for RA3"
+tags: [Aug-31-2022, 1.1.61.5]
+
+---
+
+Cross-database queries for RA3 instances are now supported by dbt Cloud projects using a Redshift connection.
+
+With cross-database queries, you can seamlessly query data from any database in the cluster, regardless of which database you are connected to with dbt.
+
+The [connection configuration](https://docs.getdbt.com/reference/warehouse-profiles/redshift-profile) `ra3_node` now defaults to `true`. This allows users to:
+
+- benefit from the full capabilities of RA3 nodes, and
+- generate appropriate dbt documentation.
+
diff --git a/website/docs/docs/dbt-versions/release-notes/07-July-2022/render-lineage-feature.md b/website/docs/docs/dbt-versions/release-notes/07-July-2022/render-lineage-feature.md
new file mode 100644
index 00000000000..cc2478ee11e
--- /dev/null
+++ b/website/docs/docs/dbt-versions/release-notes/07-July-2022/render-lineage-feature.md
@@ -0,0 +1,16 @@
+---
+title: "Enhancement: Large DAG feature"
+id: "render-lineage-feature"
+description: "Add a render button to visualize large DAGs"
+sidebar_label: "Enhancement: Large DAG feature"
+tags: [July-5-2022, v1.1.56]
+
+---
+
+You can now select **Render Lineage** to visualize large DAGs.
+ +Large DAGs can take a long time (10 or more seconds, if not minutes) to render and can cause browsers to crash. + +The new button prevents large DAGs from rendering automatically. Instead, you can select **Render Lineage** to load the visualization. This should affect about 15% of the DAGs. + + diff --git a/website/docs/docs/dbt-cloud/release-notes/08-May-2022/gitlab-auth.md b/website/docs/docs/dbt-versions/release-notes/08-May-2022/gitlab-auth.md similarity index 92% rename from website/docs/docs/dbt-cloud/release-notes/08-May-2022/gitlab-auth.md rename to website/docs/docs/dbt-versions/release-notes/08-May-2022/gitlab-auth.md index 98ce35f7649..d468f557fa2 100644 --- a/website/docs/docs/dbt-cloud/release-notes/08-May-2022/gitlab-auth.md +++ b/website/docs/docs/dbt-versions/release-notes/08-May-2022/gitlab-auth.md @@ -6,7 +6,6 @@ sidebar_label: "Enhancement: Refreshing GitLab OAuth Access" tags: [May-19-2022, v1.1.52] --- -On May 22, GitLab changed how they treat [OAuth access tokens that don't expire](https://docs.gitlab.com/ee/update/deprecations.html#oauth-tokens-without-expiration). We updated our IDE logic to handle OAuth token expiration more gracefully. Now, the first time your token expires after 2 hours of consecutive IDE usage, you will have to re-authenticate in GitLab to refresh your expired OAuth access token. We will handle subsequent refreshes for you if you provide the authorization when you re-authenticate. +On May 22, GitLab changed how they treat [OAuth access tokens that don't expire](https://docs.gitlab.com/ee/update/deprecations.html#oauth-tokens-without-expiration). We updated our IDE logic to handle OAuth token expiration more gracefully. Now, the first time your token expires after 2 hours of consecutive IDE usage, you will have to re-authenticate in GitLab to refresh your expired OAuth access token. We will handle subsequent refreshes for you if you provide the authorization when you re-authenticate. This additional security layer in the IDE is available only to the dbt Cloud enterprise plan. - diff --git a/website/docs/docs/dbt-cloud/release-notes/09-April-2022/audit-log.md b/website/docs/docs/dbt-versions/release-notes/09-April-2022/audit-log.md similarity index 58% rename from website/docs/docs/dbt-cloud/release-notes/09-April-2022/audit-log.md rename to website/docs/docs/dbt-versions/release-notes/09-April-2022/audit-log.md index 610c0618ae2..cf16d033868 100644 --- a/website/docs/docs/dbt-cloud/release-notes/09-April-2022/audit-log.md +++ b/website/docs/docs/dbt-versions/release-notes/09-April-2022/audit-log.md @@ -8,4 +8,6 @@ tags: [April-26-2022] --- -To review actions performed by people in your organization, dbt provides logs of audited user and system events. The dbt Cloud audit log lists events triggered in your organization within the last 90 days. The audit log includes details such as who performed the action, what the action was, and when it was performed. More details in the [docs](dbt-cloud/dbt-cloud-enterprise/audit-log). +To review actions performed by people in your organization, dbt provides logs of audited user and system events. The dbt Cloud audit log lists events triggered in your organization within the last 90 days. + +The audit log includes details such as who performed the action, what the action was, and when it was performed. For more details, review [the audit log for dbt Cloud Enterprise](/docs/collaborate/manage-access/audit-log) documentation. 
diff --git a/website/docs/docs/dbt-cloud/release-notes/09-April-2022/credentials-saved.md b/website/docs/docs/dbt-versions/release-notes/09-April-2022/credentials-saved.md
similarity index 94%
rename from website/docs/docs/dbt-cloud/release-notes/09-April-2022/credentials-saved.md
rename to website/docs/docs/dbt-versions/release-notes/09-April-2022/credentials-saved.md
index fe1b05b4282..971c83a4a2a 100644
--- a/website/docs/docs/dbt-cloud/release-notes/09-April-2022/credentials-saved.md
+++ b/website/docs/docs/dbt-versions/release-notes/09-April-2022/credentials-saved.md
@@ -3,7 +3,7 @@
 title: "Credentials no longer accidentally wiped when editing an environment"
 id: "credentials-saved"
 description: "Credentials are now saved when editing an environment."
 sidebar_label: "Fix: Credentials saved"
-tags: [April-29-2022]
+tags: [April-29-2022, v1.1.51]
 ---
 
 We resolved a bug where when updating unencrypted fields (e.g. threads, schema name) in an environment setting would cause secret fields (e.g. password, keypair, credential details) to be deleted from that environment. Now users can freely update environment settings without fear of unintentionally wiping credentials.
diff --git a/website/docs/docs/dbt-cloud/release-notes/09-April-2022/email-verification.md b/website/docs/docs/dbt-versions/release-notes/09-April-2022/email-verification.md
similarity index 100%
rename from website/docs/docs/dbt-cloud/release-notes/09-April-2022/email-verification.md
rename to website/docs/docs/dbt-versions/release-notes/09-April-2022/email-verification.md
diff --git a/website/docs/docs/dbt-cloud/release-notes/09-April-2022/scheduler-improvements.md b/website/docs/docs/dbt-versions/release-notes/09-April-2022/scheduler-improvements.md
similarity index 100%
rename from website/docs/docs/dbt-cloud/release-notes/09-April-2022/scheduler-improvements.md
rename to website/docs/docs/dbt-versions/release-notes/09-April-2022/scheduler-improvements.md
diff --git a/website/docs/docs/dbt-cloud/release-notes/10-March-2022/ide-timeout-message.md b/website/docs/docs/dbt-versions/release-notes/10-March-2022/ide-timeout-message.md
similarity index 90%
rename from website/docs/docs/dbt-cloud/release-notes/10-March-2022/ide-timeout-message.md
rename to website/docs/docs/dbt-versions/release-notes/10-March-2022/ide-timeout-message.md
index 237a189105a..2f1fa321753 100644
--- a/website/docs/docs/dbt-cloud/release-notes/10-March-2022/ide-timeout-message.md
+++ b/website/docs/docs/dbt-versions/release-notes/10-March-2022/ide-timeout-message.md
@@ -10,4 +10,4 @@
 We fixed an issue where a spotty internet connection could cause the “IDE session timeout” message to appear unexpectedly.
 
 We updated the health check logic so it now excludes client-side connectivity issues from the IDE session check. If you lose your internet connection, we no longer update the health-check state. Now, losing internet connectivity will no longer cause this unexpected message.
- + diff --git a/website/docs/docs/dbt-cloud/release-notes/10-March-2022/prep-and-waiting-time.md b/website/docs/docs/dbt-versions/release-notes/10-March-2022/prep-and-waiting-time.md similarity index 100% rename from website/docs/docs/dbt-cloud/release-notes/10-March-2022/prep-and-waiting-time.md rename to website/docs/docs/dbt-versions/release-notes/10-March-2022/prep-and-waiting-time.md diff --git a/website/docs/docs/dbt-cloud/release-notes/11-February-2022/DAG-updates-more.md b/website/docs/docs/dbt-versions/release-notes/11-February-2022/DAG-updates-more.md similarity index 100% rename from website/docs/docs/dbt-cloud/release-notes/11-February-2022/DAG-updates-more.md rename to website/docs/docs/dbt-versions/release-notes/11-February-2022/DAG-updates-more.md diff --git a/website/docs/docs/dbt-cloud/release-notes/11-February-2022/service-tokens-more.md b/website/docs/docs/dbt-versions/release-notes/11-February-2022/service-tokens-more.md similarity index 93% rename from website/docs/docs/dbt-cloud/release-notes/11-February-2022/service-tokens-more.md rename to website/docs/docs/dbt-versions/release-notes/11-February-2022/service-tokens-more.md index 671608a803a..5ad241d1365 100644 --- a/website/docs/docs/dbt-cloud/release-notes/11-February-2022/service-tokens-more.md +++ b/website/docs/docs/dbt-versions/release-notes/11-February-2022/service-tokens-more.md @@ -6,7 +6,7 @@ sidebar_label: "Service tokens and more" tags: [v1.1.45, February-16-2022] --- -Service tokens can now be assigned granular permissions to enforce least privilege access. If you're on Enterprise, you can assign any enterprise permission set to newly issued service tokens. If you're on Teams, you can assign the Job Admin permission set to newly issued service tokens. We highly recommend you re-issue service tokens with these new permissions to increase your security posture! See docs [here](https://docs.getdbt.com/docs/dbt-cloud/dbt-cloud-api/service-tokens#permissions-for-service-account-tokens). +Service tokens can now be assigned granular permissions to enforce least privilege access. If you're on Enterprise, you can assign any enterprise permission set to newly issued service tokens. If you're on Teams, you can assign the Job Admin permission set to newly issued service tokens. We highly recommend you re-issue service tokens with these new permissions to increase your security posture! See docs [here](https://docs.getdbt.com/docs/dbt-cloud-apis/service-tokens#permissions-for-service-account-tokens). 
#### New products and features diff --git a/website/docs/docs/dbt-cloud/release-notes/12-January-2022/IDE-autocomplete-more.md b/website/docs/docs/dbt-versions/release-notes/12-January-2022/IDE-autocomplete-more.md similarity index 100% rename from website/docs/docs/dbt-cloud/release-notes/12-January-2022/IDE-autocomplete-more.md rename to website/docs/docs/dbt-versions/release-notes/12-January-2022/IDE-autocomplete-more.md diff --git a/website/docs/docs/dbt-cloud/release-notes/12-January-2022/model-timing-more.md b/website/docs/docs/dbt-versions/release-notes/12-January-2022/model-timing-more.md similarity index 100% rename from website/docs/docs/dbt-cloud/release-notes/12-January-2022/model-timing-more.md rename to website/docs/docs/dbt-versions/release-notes/12-January-2022/model-timing-more.md diff --git a/website/docs/docs/dbt-cloud/release-notes/13-dbt-cloud-changelog-2019-2020.md b/website/docs/docs/dbt-versions/release-notes/13-dbt-cloud-changelog-2019-2020.md similarity index 99% rename from website/docs/docs/dbt-cloud/release-notes/13-dbt-cloud-changelog-2019-2020.md rename to website/docs/docs/dbt-versions/release-notes/13-dbt-cloud-changelog-2019-2020.md index 63603c45653..7da7dad0a98 100644 --- a/website/docs/docs/dbt-cloud/release-notes/13-dbt-cloud-changelog-2019-2020.md +++ b/website/docs/docs/dbt-versions/release-notes/13-dbt-cloud-changelog-2019-2020.md @@ -197,7 +197,7 @@ initial support for a GitLab integration and self-service RBAC configuration. ## dbt Cloud v1.1.7 [September 3, 2020] This release adds a Release Candidate for [dbt -v0.18.0](migration-guide/upgrading-to-0-18-0) and +v0.18.0](/guides/migration/versions) and includes bugfixes and improvements to the Cloud IDE and job scheduler. diff --git a/website/docs/docs/dbt-cloud/release-notes/14-dbt-cloud-changelog-2021.md b/website/docs/docs/dbt-versions/release-notes/14-dbt-cloud-changelog-2021.md similarity index 96% rename from website/docs/docs/dbt-cloud/release-notes/14-dbt-cloud-changelog-2021.md rename to website/docs/docs/dbt-versions/release-notes/14-dbt-cloud-changelog-2021.md index fa8e5bf34ed..cf350b78778 100644 --- a/website/docs/docs/dbt-cloud/release-notes/14-dbt-cloud-changelog-2021.md +++ b/website/docs/docs/dbt-versions/release-notes/14-dbt-cloud-changelog-2021.md @@ -27,11 +27,11 @@ It's one of the best weeks of the year - it's [Coalesce](https://coalesce.getdbt We shipped environment variables in dbt Cloud. Environment variables create a way to separate code from configuration - allowing you to set config based on context and keep secrets like git tokens securely stored. #### New products and features -- You can now add environment variables to your dbt Cloud project. Why does this matter? Environment variables are a fundamental building block of a dbt project, which until now, we only enabled in dbt Core. They power many use cases such as cloning private packages, limiting the amount of data that is processed in development environments, changing your data sources depending on the environment, and more. Read about environment variables in our [blog post](https://blog.getdbt.com/introducing-environment-variables-in-dbt-cloud/) or [docs](https://docs.getdbt.com/docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables). +- You can now add environment variables to your dbt Cloud project. Why does this matter? Environment variables are a fundamental building block of a dbt project, which until now, we only enabled in dbt Core. 
They power many use cases such as cloning private packages, limiting the amount of data that is processed in development environments, changing your data sources depending on the environment, and more. Read about environment variables in our [blog post](https://blog.getdbt.com/introducing-environment-variables-in-dbt-cloud/) or [docs](https://docs.getdbt.com/docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables). ## dbt Cloud v1.1.38 (October 27, 2021) -Have you used the [Metadata API](https://docs.getdbt.com/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview) yet? The Metadata API is available to customers on the Team and Enterprise plans, and with it, you can learn tons about your dbt project, if it's running dbt v0.19.0 or later. You can now query information about _any_ run, not just the last run of a job. Mo' data, mo' fun! +Have you used the [Metadata API](https://docs.getdbt.com/docs/dbt-cloud-apis/metadata-api) yet? The Metadata API is available to customers on the Team and Enterprise plans, and with it, you can learn tons about your dbt project, if it's running dbt v0.19.0 or later. You can now query information about _any_ run, not just the last run of a job. Mo' data, mo' fun! ## dbt Cloud v1.1.37 (October 13, 2021) @@ -87,7 +87,7 @@ We added a DAG in the IDE, so that you can see your model dependencies as you de The Metadata API is now in GA! When dbt Cloud invokes certain commands like run, test, seed, etc, dbt generates metadata in the form of [artifacts](https://docs.getdbt.com/reference/artifacts/dbt-artifacts). These artifacts give you tons of information about project set up, run times, test details, compiled SQL, and so much more. Now dbt Cloud serves a GraphQL API which supports arbitrary queries over these artifacts, so you can retrieve the metadata you want almost instantaneously. #### New products and features -- The Metadata API is the start of our metadata product suite. Learn more about how to use the Metadata API [here](https://docs.getdbt.com/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview). +- The Metadata API is the start of our metadata product suite. Learn more about how to use the Metadata API [here](https://docs.getdbt.com/docs/dbt-cloud-apis/metadata-api). - dbt Enterprise customers using GitHub now get better fine-grained access control in their dbt projects. dbt will enforce git permissions for every developer to ensure that read / write policies in GitHub carry through to the IDE. @@ -139,7 +139,7 @@ We shipped a far better experience for GitLab users. Be sure to check out new CI #### New products and features -- `Slim CI`: We’ve made Slim CI available for all our cloud customers! With Slim CI, you don't have to rebuild and test all your models; you can instruct dbt Cloud to run jobs on only modified or new resources. If you are a GitHub or GitLab user, try creating a new job that runs on pull requests and you can signal to dbt to run only on these modified resources by including the `state:modified+` argument. Read more about Slim CI [here](https://docs.getdbt.com/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github#slim-ci). +- `Slim CI`: We’ve made Slim CI available for all our cloud customers! With Slim CI, you don't have to rebuild and test all your models; you can instruct dbt Cloud to run jobs on only modified or new resources. 
If you are a GitHub or GitLab user, try creating a new job that runs on pull requests and you can signal to dbt to run only on these modified resources by including the `state:modified+` argument. Read more about Slim CI [here](/docs/deploy/cloud-ci-job). - Native GitLab authentication for dbt Cloud Developer and Team Tiers: We’ve shipped native GitLab auth into GA. You can now import new GitLab repos with a couple clicks, trigger CI builds when Merge Requests are opened in GitLab, and carry GitLab permissions through to dbt Cloud IDE's git actions. Read how to set up native GitLab auth [here](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-gitlab). @@ -178,7 +178,7 @@ A lot of improvements coming for GitLab webhooks and native auth. We also fixed - Fix SSO re-auth page - Fix blank verify email page - Resolve git refresh regression -- Fix missing "Run on Merge" button in Job creation/edit form- +- Fix missing "Run on Merge" button in Job creation/edit form- - Warn users they have unsaved changes - Updates test command suggestions and regex for common action suggestions - Updates order of stylesheet import to fix missing border bug @@ -243,7 +243,7 @@ Exciting things coming down the pipe - ongoing enhancements to the command bar e ## dbt Cloud v1.1.24 (April 14, 2021) -Phew! As our company grows, so too does our changelog! Look at all these! The big chunks you'll see here are related to some ongoing in-IDE work, focused on the command bar experience, as well as some partner & connection work (see the Gits, Databricks, and so forth), and of course ongoing longer-term bets around metadata! +Phew! As our company grows, so too does our changelog! Look at all these! The big chunks you'll see here are related to some ongoing in-IDE work, focused on the command bar experience, as well as some partner & connection work (see the Gits, Databricks, and so forth), and of course ongoing longer-term bets around metadata! #### Enhancements @@ -282,7 +282,7 @@ Phew! As our company grows, so too does our changelog! Look at all these! The bi ## dbt Cloud v1.1.23 (March 31, 2021) -Some backend work, some frontend work, some bug fixes: a nice mix for this release. A few user facing changes you may have noticed already are the persistence of dark/light mode settings across refresh (no more blinding IDE!), branches in the IDE being categorized by Active vs. Removed from Remote, and a tidier new file creation flow, with the file tree expanding to show the new file and opening a new tab to populate the said file! +Some backend work, some frontend work, some bug fixes: a nice mix for this release. A few user facing changes you may have noticed already are the persistence of dark/light mode settings across refresh (no more blinding IDE!), branches in the IDE being categorized by Active vs. Removed from Remote, and a tidier new file creation flow, with the file tree expanding to show the new file and opening a new tab to populate the said file! 
#### Enhancements @@ -291,11 +291,11 @@ Some backend work, some frontend work, some bug fixes: a nice mix for this relea - Upgrade to Tailwind 2.0 and FUI 0.0.5 - Allow users to create metadata tokens from the UI - Support manually-managed group memberships -- SSO: resolve bug w/ first & last names acting up +- SSO: resolve bug w/ first & last names acting up - Integrate Delighted for NPS surveys -- Add dbt 0.19.1rc1 to Cloud +- Add dbt 0.19.1rc1 to Cloud - Add an account-level setting to require users to re-authenticate via SSO -- Read-only metadata ServiceToken for Cloud +- Read-only metadata ServiceToken for Cloud - Persist IDE light mode / dark mode across refresh - Categorize & order git branches - Improve new file creation flow @@ -336,7 +336,7 @@ Rolling out a few long-term bets to ensure that our beloved dbt Cloud does not f ## dbt Cloud v1.1.21 (March 3, 2021) -This changelog wraps up work on what we've been calling the SQL Drawer in the IDE - some design nudges, some interface adjustments, overall a cleaner and snappier experience. If you haven't dipped into the IDE in a while it's worth taking a look! Some back-end work as well, making SSO and role based admin easier and more broadly available for Enterprise level folks, along with your usual assortment of bug squashes and iterations. +This changelog wraps up work on what we've been calling the SQL Drawer in the IDE - some design nudges, some interface adjustments, overall a cleaner and snappier experience. If you haven't dipped into the IDE in a while it's worth taking a look! Some back-end work as well, making SSO and role based admin easier and more broadly available for Enterprise level folks, along with your usual assortment of bug squashes and iterations. #### Enhancements @@ -359,7 +359,7 @@ This changelog wraps up work on what we've been calling the SQL Drawer in the ID ## dbt Cloud v1.1.20 (February 17, 2021) -Continued stability and quality of life improvements for folks with multiple accounts and projects - no longer will you have to remember the chronological order of birth of your accounts and projects, as they'll be ordered by the much easier to parse (for human brains anyway) alphabetical order. We're also shipping some experience improvements in the SQL Drawer at the bottom half of the IDE. +Continued stability and quality of life improvements for folks with multiple accounts and projects - no longer will you have to remember the chronological order of birth of your accounts and projects, as they'll be ordered by the much easier to parse (for human brains anyway) alphabetical order. We're also shipping some experience improvements in the SQL Drawer at the bottom half of the IDE. #### Enhancements @@ -377,7 +377,7 @@ Continued stability and quality of life improvements for folks with multiple acc ## dbt Cloud v1.1.19 (February 3, 2021) -The latest release of dbt (Oh Nineteen Oh) is now available for your enjoyment on dbt Cloud! We're also releasing some service token pieces here, though they're not quite ready for wide release yet. Moving forward, Oh Nineteen Oh will probably end up being the minimum version required to run the Metadata API & Metadata Toolkit, so, this is a big release! +The latest release of dbt (Oh Nineteen Oh) is now available for your enjoyment on dbt Cloud! We're also releasing some service token pieces here, though they're not quite ready for wide release yet. 
Moving forward, Oh Nineteen Oh will probably end up being the minimum version required to run the Metadata API & Metadata Toolkit, so, this is a big release! #### Enhancements @@ -417,4 +417,3 @@ Most notable things here are around foundational work toward future feature rele - Add DBT_CLOUD_CONTEXT environment variable - Add logic to hide IP whitelist message for on-prem customers - fix 0.19.0rc1 run image dependencies - diff --git a/website/docs/docs/dbt-versions/upgrade-core-in-cloud.md b/website/docs/docs/dbt-versions/upgrade-core-in-cloud.md new file mode 100644 index 00000000000..f0b99499891 --- /dev/null +++ b/website/docs/docs/dbt-versions/upgrade-core-in-cloud.md @@ -0,0 +1,314 @@ +--- +title: "Upgrade Core version in Cloud" +id: "upgrade-core-in-cloud" +--- + +## Upgrading to the latest version of dbt in Cloud + +In dbt Cloud, both jobs and environments are configured to use a specific version of dbt Core. The version can be upgraded at any time. + +### Environments + +Navigate to the settings page of an environment, then click **edit**. Click the **dbt Version** dropdown bar and make your selection. From this list, you can select an available version of Core to associate with this environment. + + + +Be sure to save your changes before navigating away. + +### Jobs + +Each job in dbt Cloud can be configured to inherit parameters from the environment it belongs to. + + + +The example job seen in the screenshot above belongs to the environment "Prod". It inherits the dbt version of its environment as shown by the **Inherited from ENVIRONMENT_NAME (DBT_VERSION)** selection. You may also manually override the dbt version of a specific job to be any of the current Core releases supported by Cloud by selecting another option from the dropdown. + +## Supported Versions + +We have always encouraged our customers to upgrade dbt Core versions whenever a new minor version is released. We released our first major version of dbt - `dbt 1.0` - in December 2021. Alongside this release, we updated our policy on which versions of dbt Core we will support in dbt Cloud. + + + + > **Starting with v1.0, any subsequent minor versions will be supported in dbt Cloud for 1 year post release. At the end of the 1 year window, accounts must upgrade to a supported version of dbt or risk service disruption.** + +We will continue to update this table so that customers know when we plan to stop supporting different versions of Core in dbt Cloud. + + + + +:::warning ⚠️ v0.X Non-Supported Period + Accounts had until the end of June 2022 to upgrade to dbt 1.0 or later. Pre-dbt 1.0 versions will no longer receive patch fixes, and our support team will no longer assist with dbt version specific help on non-supported versions of dbt. Additionally, jobs running dbt versions prior to 1.0 may experience service disruptions before the end of the year and may be removed from the dbt Cloud context by year end. You will receive additional notification before any planned disruption to your production jobs. +::: + +Starting in v1.0, dbt Cloud will ensure that you're always using the latest compatible patch release of `dbt-core` and plugins, including all the latest fixes. You may also choose to try prereleases of those patch releases before they are generally available. + + + +For more on version support and future releases, see [Understanding dbt Core versions](core-versions). + +#### What will actually happen on the end of support date? 
+
+One year after a minor version of v1.X is released, we will try to run our users' projects on the latest release of dbt if they have not already upgraded their projects themselves. In a post-dbt v1.0 world, there won't be breaking changes between minor versions of dbt, so we might be reasonably successful at upgrading our users' versions for them. However, our strong preference is for accounts to manage the upgrade process themselves, which is a more cautious way to prevent failures in their production pipelines. We will give accounts consistent communication that they're hitting the end of their supported window, so they can plan accordingly.
+
+#### What should you be doing today?
+
+You should **upgrade to v1.0 as soon as you can** - and we recommend that you proceed **slowly and steadily**.
+
+Why? Because attempting to upgrade 6 minor versions at one time (v0.15.0 -> v0.21.0) implies 6x the potential for breaking changes, versus upgrading a single minor version.
+
+Refactoring code is much easier when you're updating a well-defined, constrained surface area. Doing things incrementally is the way to go.
+
+Additionally, upgrading to more recent versions of dbt Core will enable better performance and more features in dbt Cloud. Below is a compatibility matrix between dbt versions and dbt Cloud features. Hopefully this provides more motivation to always update your environments and jobs to run the latest version of dbt.
+
+| dbt Cloud Feature | dbt Core Version Needed |
+| ------------- | -------------- |
+| [Environment variable secret scrubbing](/docs/build/environment-variables#handling-secrets) | v1.0+ |
+| DAG in the IDE | v0.20.0+ |
+| [Metadata API](/docs/dbt-cloud-apis/metadata-api) | v0.19.0+ |
+| [Dashboard status tiles](/docs/deploy/dashboard-status-tiles) | v0.19.0+ |
+| [Slim CI](/docs/deploy/cloud-ci-job) | v0.18.0+ |
+
+#### Need help upgrading?
+
+If you want more advice on how to upgrade your dbt projects, check out our [migration guides](/guides/migration/versions/) and our [upgrading Q&A page](/docs/dbt-versions/upgrade-core-in-cloud#upgrading-legacy-versions-under-10).
+
+## Upgrading legacy versions under 1.0
+
+This Q&A guide should help you figure out what changes you might need to make to successfully upgrade your version of dbt Core in dbt Cloud. As a reminder, we recommend everyone upgrade to the most recent version of dbt, as we will not support all versions of dbt in Cloud indefinitely. We document which versions of dbt Core we support [here](/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version#supported-versions).
+
+There aren't many breaking changes between minor versions, and it may be the case that you don't need to change any code to upgrade to a newer version of dbt in dbt Cloud. Before dbt 1.0, breaking changes can occur between any two minor versions; starting with dbt 1.0, minor releases do not include breaking code changes. If no code changes are needed, all you have to do is [change the settings](/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version.md) in your environment or job to run a more recent version of dbt.
+
+#### Changes between minor versions of dbt that will affect your project
+
+Below we try to help you answer the question of whether a known breaking change between minor versions of dbt will affect your project. If you answer "yes" to any of the questions below, we recommend that you read the migration guides that we've put together for every dbt minor version release.
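+
+One upgrade chore that applies at every version jump is repinning your installed packages, as the note below explains. A sketch of what that repin looks like in `packages.yml`, using the dbt-utils version from that note as the example:
+
+```yml
+packages:
+  - package: dbt-labs/dbt_utils
+    version: 0.7.6  # per its dbt_project.yml, supports dbt v0.20, v0.21, and v1.0
+```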
+ +:::info An Important Note on Packages + +If you use any packages from [dbt Hub](https://hub.getdbt.com/), make sure you also upgrade to a version of the package that supports the dbt version you intend to upgrade to. You can see which dbt versions a package supports by checking on the `require-dbt-version:` in the package's dbt_project.yml file on GitHub. + +As an example, dbt-utils version 0.7.6 supports dbt v0.20, v0.21, and v1.0, as described in its [dbt_project.yml](https://github.com/dbt-labs/dbt-utils/blob/0.7.6/dbt_project.yml). + +After you've changed the package version in your packages.yml file, be sure to run `dbt deps` in the IDE to install the updated version. + +::: + +
+ Upgrading to v1.0.latest from v0.21 +
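+
+One change worth calling out before the checklist below: v1.0 renames the two test types, so selectors written against the old names stop matching. A quick sketch of the before and after:
+
+```bash
+# before v1.0
+dbt test --select test_type:schema
+# v1.0 and later
+dbt test --select test_type:generic
+```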

+ +:::info Universal change +Certain configurations in dbt_project.yml have been renamed +::: + +Existing projects will see non-breaking deprecation warnings. You can change three lines in most projects to remove the warnings: + + + +```yml +model-paths: ["models"] # formerly named "source-paths" +seed-paths: ["data"] # formerly named "data-paths" +clean-targets: + - "target" + - "dbt_packages" # formerly named "dbt_modules" +``` + + + +- Do you select tests using the old names for test types? (`test_type:schema`, `test_type:data`, `--schema`, `--data`) +- Do you have custom macro code that calls the (undocumented) global macros `column_list`, `column_list_for_create_table`, `incremental_upsert`? +- Do you have custom scripts that parse dbt artifacts? +- (BigQuery only) Do you use dbt's legacy capabilities around ingestion-time-partitioned tables? + +If you believe your project might be affected, read more details in the migration guide [here](/guides/migration/versions/upgrading-to-v1.0). + +
+ + +
+ Upgrading to v0.21.latest from v0.20 +
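+
+For the first question below: v0.21 renames the freshness task, so source freshness checks are now invoked like this (source name illustrative):
+
+```bash
+# before v0.21
+dbt source snapshot-freshness --select my_source
+# v0.21 and later
+dbt source freshness --select source:my_source
+```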

+ +- Do you select specific sources to check freshness (`dbt snapshot-freshness --select `)? +- Do you have custom scripts that parse dbt JSON artifacts? +- (Snowflake only) Do you have custom macros or materializations that depend on using transactions, such as statement blocks with `auto_begin=True`? + +If you believe your project might be affected, read more details in the migration guide [here](/guides/migration/versions). + +
+ + + +
+ Upgrading to v0.20.latest from v0.19 +
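+
+On the second question below: v0.20 deprecates the `packages` argument of `adapter.dispatch` in favor of `macro_namespace`. A sketch of the updated call (macro and package names illustrative):
+
+```sql
+{% macro concat(fields) %}
+  {# v0.20+: dispatch by macro_namespace rather than a packages list #}
+  {{ return(adapter.dispatch('concat', macro_namespace='dbt_utils')(fields)) }}
+{% endmacro %}
+```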

+ +- Does your project define any custom schema tests? +- Does your project use `adapter.dispatch` or the `spark_utils` package? +- Do you have custom scripts that parse dbt JSON artifacts? + +If you believe your project might be affected, read more details in the migration guide [here](/guides/migration/versions). + +
+ + + +
+ Upgrading to v0.19.latest from v0.18 +

+
+ +:::info Important + +If you have not already, you must add `config-version: 2` to your dbt_project.yml file. +See **Upgrading to v0.17.latest from v0.16** below for more details. + +::: +
+ + +- Do you have custom scripts that parse dbt JSON artifacts? +- Do you have any custom materializations? + +If you believe your project might be affected, read more details in the migration guide [here](/guides/migration/versions). + +
+ + +
+ Upgrading to v0.18.latest from v0.17 +
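+
+For the question below: v0.18 deprecates `adapter_macro` in favor of `adapter.dispatch`. A sketch of the rewrite (names and argument illustrative):
+
+```sql
+{# before v0.18 #}
+{{ adapter_macro('my_package.do_something', arg1) }}
+
+{# v0.18 and later: dispatch resolves an adapter-specific implementation #}
+{{ adapter.dispatch('do_something', packages=['my_package'])(arg1) }}
+```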

+ +- Do you directly call `adapter_macro`? + +If you believe your project might be affected, read more details in the migration guide [here](/guides/migration/versions). + +
+ + + +
+ Upgrading to v0.17.latest from v0.16 +

+
+ +:::info Universal change + +You must add `config-version: 2` to your dbt_project.yml file. +::: +
+ + + +```yml +name: my_project +version: 1.0.0 + +config-version: 2 + +vars: + my_var: 1 + another_var: true + +models: + ... +``` + + + +
+ +:::info Universal change + +`vars:` are now defined not in your `models:` but are a separate section in dbt_project.yml file. +::: +
+ + + + +```yml +name: my_project +version: 1.0.0 + +config-version: 2 + +vars: + my_var: 1 + another_var: true + +models: + ... +``` + + + + +- Do you have dictionary configs in your dbt_project.yml such as `partition_by` or `persist_docs`? If yes, you need to add a preceding +. + + + +```yml + +models: + my_project: + reporting: + +partition_by: + field: date_day + data_type: timestamp +``` + + +If you believe your project might be affected, read more details in the migration guide [here](/guides/migration/versions). + +
+ + +
+ Upgrading to v0.16.latest from v0.15 +
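+
+For the first question below: v0.16 changes the expected signature of `generate_schema_name` to take the node as a second argument. A sketch of the updated form (the body here is illustrative):
+
+```sql
+{% macro generate_schema_name(custom_schema_name, node) -%}
+    {%- if custom_schema_name is none -%}
+        {{ target.schema }}
+    {%- else -%}
+        {{ target.schema }}_{{ custom_schema_name | trim }}
+    {%- endif -%}
+{%- endmacro %}
+```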

+ +- Do you use the custom `generate_schema_name` macro? +- Do you use `partition_by` config for BigQuery models? + +If you believe your project might be affected, read more details in the migration guide [here](/guides/migration/versions). +
+ + +
+ Upgrading to v0.15.latest from v0.14 + +

+ +- Do you have a custom materialization? +- Do you have a macro that accesses `Relations` directly? + +If you believe your project might be affected, read more details in the migration guide [here](/guides/migration/versions). +
+ +
+ Upgrading to v0.14.latest from v0.13 +
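+
+On the first question below: v0.14 replaces `Archives` with snapshots, which live in `.sql` files as `{% snapshot %}` blocks. A minimal sketch (names and config values illustrative):
+
+```sql
+{% snapshot orders_snapshot %}
+    {{ config(
+        target_schema='snapshots',
+        unique_key='id',
+        strategy='timestamp',
+        updated_at='updated_at'
+    ) }}
+    select * from {{ source('jaffle_shop', 'orders') }}
+{% endsnapshot %}
+```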

+ +- Do you still use `Archives`? +- Do you use the custom `generate_schema_name` macro? +- Do you use the `—non-destructive` flag? + +If you believe your project might be affected, read more details in the migration guide [here](/guides/migration/versions). +
+ + +#### Testing your changes before upgrading +Once you have an idea about what code changes you'll need to make, you can start implementing them. We recommend that you create a separate dbt project, **Upgrade Project**, to test your changes before making them live in your main dbt project. In your **Upgrade Project**, connect to the same repository that you use for your main dbt project, but this time, set the development environment [settings](docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version) to run the latest version of dbt Core. Next check out a branch `dbt-version-upgrade`, make the appropriate updates to your project (if needed), and see if your dbt project compiles and runs with the new version of dbt in the IDE. If jumping directly to the latest version of dbt is too far of a leap for your project, try iteratively getting your project to work on each successive minor version. There are years of development and a handful of breaking changes between two distant versions of dbt (e.g. 0.14 --> 1.0). There are far fewer between two subsequent versions of dbt, which is why upgrading regularly is important. + +Once you have your project compiling and running on the latest version of dbt in the development environment for your `dbt-version-upgrade` branch, try replicating one of your production jobs to run off your branch's code. You can do this by creating a new deployment environment for testing, setting the custom branch to 'ON' and referencing your `dbt-version-upgrade` branch. You'll also need to set the dbt version in this environment to the latest dbt Core version. + + + + + + +Then add a job to the new testing environment that replicates one of the production jobs your team relies on. If that job runs smoothly, you should be all set to merge your branch into main and change your development and deployment environments in your main dbt project to run off the newest version of dbt Core. diff --git a/website/docs/docs/dbt-cloud/deployments/airgapped.md b/website/docs/docs/deploy/airgapped.md similarity index 100% rename from website/docs/docs/dbt-cloud/deployments/airgapped.md rename to website/docs/docs/deploy/airgapped.md diff --git a/website/docs/docs/dbt-cloud/deployments/architecture.md b/website/docs/docs/deploy/architecture.md similarity index 95% rename from website/docs/docs/dbt-cloud/deployments/architecture.md rename to website/docs/docs/deploy/architecture.md index 370a3082472..2db65c53ea2 100644 --- a/website/docs/docs/dbt-cloud/deployments/architecture.md +++ b/website/docs/docs/deploy/architecture.md @@ -1,6 +1,6 @@ --- -id: deployment-architecture title: Architecture +id: architecture --- This page is intended to help both practitioners seeking to understand the architecture and data flow of the hosted dbt Cloud product. @@ -32,7 +32,7 @@ In addition to the application components, there are a few critical dependencies ### Data Warehouse Interaction -dbt Cloud's primary role is as a data processor, not a data store. The dbt Cloud application enables users to dispatch SQL to the warehouse for transformation purposes. However, it is possible for users to dispatch SQL that returns customer data into the dbt Cloud application. This data is never persisted and will only exist in memory on the instance in question. In order to properly lock down customer data, it is critical that proper data warehouse permissioning is applied to prevent improper access or storage of sensitive data. +dbt Cloud's primary role is as a data processor, not a data store. 
The dbt Cloud application enables users to dispatch SQL to the warehouse for transformation purposes. However, it is possible for users to dispatch SQL that returns customer data into the dbt Cloud application. This data is never persisted and will only exist in memory on the instance in question. In order to properly lock down customer data, it is critical that proper permissioning is applied to prevent improper access or storage of sensitive data. ### Deployment Architecture @@ -40,7 +40,7 @@ The following two sections describe the network architectures for dbt Cloud depl #### Hosted Network Architecture -The following diagram shows the network architecture for the hosted _Multi Tenant_ and _Single Tenant_ deployment types. While many of the specifications differ between the Multi Tenant and Single Tenant offerings the basic types of components illustrated below are mostly the same. Read below for more information on each of the components and how they might differ between the two deployment models. +The following diagram shows the network architecture for the hosted _Multi Tenant_ and _Single Tenant_ deployment types. While many of the specifications differ between the Multi Tenant and Single Tenant offerings the basic types of components illustrated below are mostly the same. Read below for more information on each of the components and how they might differ between the two deployment models. @@ -50,5 +50,5 @@ The following diagram shows the network architecture for the hosted _Multi Tenan - **EC2**: The hosted dbt Cloud deployments leverage a cluster of [AWS EC2](https://aws.amazon.com/ec2/) worker nodes to run the dbt Cloud application. - **EBS**: In order to store application data, dbt Cloud leverages [AWS Elastic Block Store](https://aws.amazon.com/ebs/) mounted to the EC2 instances described above. - **EFS**: An [AWS Elastic File System](https://aws.amazon.com/efs/) is provisioned for hosted deployments to store and manage local files from the dbt Cloud IDE. -- **S3**: [AWS Simple Storage Service (S3)](https://aws.amazon.com/s3/) is used to store dbt Cloud application logs and artifacts (such as those generated from dbt job runs). +- **S3**: [AWS Simple Storage Service (S3)](https://aws.amazon.com/s3/) is used to store dbt Cloud application logs and artifacts (such as those generated from dbt job runs). - **RDS**: The hosted dbt Cloud application leverages [AWS Postgres RDS](https://aws.amazon.com/rds/postgresql/) to store application information such as accounts, users, environments, etc. Note that as explained in the [Data Warehouse Interaction](#data-warehouse-interaction) section above, no data from an associated warehouse is ever stored in this database. diff --git a/website/docs/docs/deploy/cloud-ci-job.md b/website/docs/docs/deploy/cloud-ci-job.md new file mode 100644 index 00000000000..8b4afe0a544 --- /dev/null +++ b/website/docs/docs/deploy/cloud-ci-job.md @@ -0,0 +1,202 @@ +--- +title: "dbt Cloud CI job" +id: "cloud-ci-job" +description: "You can enable continuous integration (CI) to test every single change prior to deploying the code to production just like in a software development workflow." +--- + +## Overview + +dbt Cloud makes it easy to test every single code change you make prior to deploying that new logic into production. 
Once you've connected your [GitHub account](/docs/collaborate/git/connect-github), [GitLab account](/docs/collaborate/git/connect-gitlab), or [Azure DevOps account](/docs/collaborate/git/connect-azure-devops), you can configure jobs to run when new pull requests are opened against your dbt repo.
+
+dbt Cloud will build the models affected by the new pull request code change in a temp schema, which acts as a quasi-staging environment, and will also run the tests that you've written for these models as a check. When the continuous integration (CI) job completes, the run status will be shown directly in the pull request. This makes it possible to deploy new code to production with confidence.
+
+:::info Draft Pull Requests
+
+Jobs will _not_ be triggered by draft pull requests. If you would like jobs to run on each new commit, please mark your pull request as **Ready for review**.
+
+:::
+
+:::info GitLab Compatibility
+
+GitLab Webhooks are available only to GitLab users who have a paid or self-hosted GitLab account.
+
+:::
+
+:::info Common Errors
+If you previously configured your dbt project by providing a generic git URL that clones using SSH, you need to [reconfigure the project](/docs/deploy/cloud-ci-job#reconnecting-your-dbt-project-to-use-dbt-clouds-native-integration-with-github-gitlab-or-azure-devops) to connect through dbt Cloud's native integration with GitHub, GitLab, or Azure DevOps instead.
+:::
+
+## Understanding dbt Cloud Slim CI
+When a [dbt Cloud CI job is set up](/docs/deploy/cloud-ci-job#configuring-a-dbt-cloud-ci-job), dbt Cloud will listen for webhooks from GitHub, GitLab, or Azure DevOps indicating that a new PR has been opened or updated with new commits. When one of these webhooks is received, dbt Cloud will enqueue a new run of the CI job. Crucially, this run will build into a temporary schema using the prefix `dbt_cloud_pr_`. This schema isolation acts as a quasi-staging environment, so that you can see the builds resulting from the code associated with the PR's commit SHA. The unique schema name can be found in the run details for the given run, as shown below.
+
+
+
+After completing the dbt run, dbt Cloud will update the pull request in GitHub, GitLab, or Azure DevOps with a status message indicating the results of the run. The status message will state whether the models and tests ran successfully or not. You can enable a setting in your git provider that makes "successful pull request checks" a requirement to merge code. And finally, once the pull request is closed or merged, dbt Cloud will delete the temporary schema from your data warehouse.
+
+### GitHub pull request example
+
+The green checkmark means the dbt builds and tests were successful. The *Details* link shown here will navigate you to the relevant CI run in dbt Cloud.
+
+
+### GitLab pull request example
+
+The green checkmark means the dbt builds and tests were successful. Clicking the dbt Cloud pop-up will navigate you to the relevant CI run in dbt Cloud.
+
+
+### Azure DevOps pull request example
+
+The green checkmark means the dbt builds and tests were successful. Clicking on the dbt Cloud section navigates you to the relevant CI run in dbt Cloud.
+
+
+## Configuring a dbt Cloud CI job
+
+Setting up a CI job is very similar to setting up a normal production job that runs on a schedule; however, a CI job has some notable differences.
+
+There are a few components that define a Slim CI job:
+- The Slim CI job must defer to a production job.
+- The Slim CI job commands need to have a `state:modified+` selector to build only new or changed models and their downstream dependents. Importantly, state comparison can only happen when there is a deferred job selected to compare state to.
+- The Slim CI job must be triggered by a pull request.
+
+#### Deferral and State Comparison
+
+When creating a job in dbt Cloud, you can set your execution settings to defer to a previous run state. Use the dropdown menu to select which *production* job you want to defer to.
+
+
+
+When a job is selected, dbt Cloud will look at the artifacts from that job's most recent successful run. dbt will then use those artifacts to determine the set of new and modified resources.
+
+In your job commands, you can signal to dbt to run only on these modified resources and their children by including the `state:modified+` argument.
+
+For example:
+
+```
+dbt build --select state:modified+
+```
+
+Because dbt Cloud manages deferral and state environment variables, there is no need to specify `--defer` or `--state` flags. **Note:** Both jobs need to be running dbt v0.18.0 or later.
+
+
+To learn more about state comparison and deferral in dbt, read the docs on [state](understanding-state).
+
+#### Using a webhook trigger
+
+In the **Triggers** section of the job's settings, switch to the **Webhooks** tab, and then check the box next to **Run on Pull Requests?** as shown below.
+
+
+
+This tells dbt Cloud to run the job whenever a pull request or commit is made, rather than on a schedule. Be sure to turn the schedule of the job off if you don't want it to also run on a time-based cadence.
+
+
+
+## Fresh Rebuilds
+
+As an extension of the Slim CI feature, dbt Cloud can rerun and retest only the resources that are fresher compared to a previous run.
+
+
+
+Only supported by v1.1 or newer.
+
+
+
+
+
+Only supported by v1.1 or newer.
+
+:::caution Experimental functionality
+The `source_status` selection is experimental and subject to change. During this time, ongoing improvements may limit this feature’s availability and cause breaking changes to its functionality.
+:::
+
+When a job is selected, dbt Cloud will surface the artifacts from that job's most recent successful run. dbt will then use those artifacts to determine the set of fresh sources. In your job commands, you can signal to dbt to run and test only on these fresher sources and their children by including the `source_status:fresher+` argument. This requires both the previous and current states to have the `sources.json` artifact available. Put plainly, both jobs need to run `dbt source freshness`.
+
+For example:
+```bash
+# Command step order
+dbt source freshness
+dbt build --select source_status:fresher+
+```
+
+
+More example commands in [Pro-tips for workflows](/guides/legacy/best-practices.md#pro-tips-for-workflows).
+
+## Limitations
+
+If your temporary PR schemas aren't dropping after a merge or close of the PR, it's likely due to one of the scenarios below. Open and review the toggles below for recommendations on how to resolve this:
+
+ You used dbt Cloud environment variables in your connection settings page +
+
To resolve this, remove the environment variables from your connection settings.
+
+
+
+ You have an empty/blank default schema +
+
To change this, edit and fill in your default schema.
+
+
+
+ You have overridden the generate_schema_name macro +
+
To resolve this, change your macro so that the temporary PR schema name contains the default prefix and review the guidance below: +

+ • ✅ Temporary PR schema name contains the prefix dbt_cloud_pr_ (like dbt_cloud_pr_123_456_marketing)

+ • ❌ Temporary PR schema name doesn't contain the prefix dbt_cloud_pr_ (like marketing).
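+
+For reference, a compliant override looks roughly like dbt's default macro, which always keeps the default schema (the part dbt Cloud sets to dbt_cloud_pr_... for CI runs) as the prefix:
+
+```sql
+{% macro generate_schema_name(custom_schema_name, node) -%}
+    {# keep the connection's default schema so CI prefixes are preserved #}
+    {%- set default_schema = target.schema -%}
+    {%- if custom_schema_name is none -%}
+        {{ default_schema }}
+    {%- else -%}
+        {{ default_schema }}_{{ custom_schema_name | trim }}
+    {%- endif -%}
+{%- endmacro %}
+```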

+
+
+
+
+ You have overridden the generate_database_name macro +
+
Assuming the project's default connection is to a database named analytics, review the guidance below to resolve this:
+

+ • ✅ Database remains the same as the connection default (like analytics)

+ • ❌ Database has changed from the default connection (like dev).
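+
+A blunt but compliant sketch is to pin the macro to the connection default, so temporary PR schemas land where dbt Cloud can clean them up (this assumes you don't rely on custom database overrides):
+
+```sql
+{% macro generate_database_name(custom_database_name, node) -%}
+    {# always resolve to the connection's default database #}
+    {{ target.database }}
+{%- endmacro %}
+```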

+
+
+
+ + +Make the necessary changes to your project and double-check if the temporary PR schemas drop after a merge or close of the PR. + +## Troubleshooting + +### Reconnecting your dbt project to use dbt Cloud's native integration with GitHub, GitLab, or Azure DevOps + +If your dbt project relies the generic git clone method that clones using SSH and deploy keys to connect to your dbt repo, you need to disconnect your repo and reconnect it using the native GitHub, GitLab, or Azure DevOps integration in order to enable dbt Cloud Slim CI. + +First, make sure you have the [native GitHub authentication](/docs/collaborate/git/connect-github), [native GitLab authentication](/docs/collaborate/git/connect-gitlab), or [native Azure DevOps authentication](/docs/collaborate/git/connect-azure-devops) set up depending on which git provider you use. After you have gone through those steps, head to **Account Settings**, select **Projects** and click on the project you'd like to reconnect through native GitHub, GitLab, or Azure DevOps auth. Then click on the repository link. + +Once you're in the repository page, click **Edit** and then click **Disconnect Repository** at the bottom. + + +Confirm that you'd like to disconnect your repository. You should then see a new **Configure a repository** link in your old repository's place. Click through to the configuration page: + + + +Select the **GitHub**, **GitLab**, or **AzureDevOps** tab and reselect your repository. That should complete the setup of the project and enable you to set up a dbt Cloud CI job. + +### Error messages that refer to schemas from previous PRs + +If you receive a schema-related error message referencing a *previous* PR, this is usually an indicator that you are not using a production job for your deferral and are instead using *self*. If the prior PR has already been merged, the prior PR's schema may have been dropped by the time the Slim CI job for the current PR is kicked off. + +To fix this issue, select a production job run to defer to instead of self. + + +### Production job runs failing at the **Clone Git Repository** step + +dbt Cloud can only checkout commits that belong to the original repository. dbt Cloud _cannot_ checkout commits that belong to a fork of that repository. + +If you receive the following error message at the **Clone Git Repository** step of your job run: + +``` +Error message: +Cloning into '/tmp/jobs/123456/target'... +Successfully cloned repository. +Checking out to e845be54e6dc72342d5a8f814c8b3316ee220312... +Failed to checkout to specified revision. +git checkout e845be54e6dc72342d5a8f814c8b3316ee220312 +fatal: reference is not a tree: e845be54e6dc72342d5a8f814c8b3316ee220312 +``` + +Double-check that your PR isn't trying to merge using a commit that belongs to a fork of the repository attached to your dbt project. 
diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-dashboard-status-tiles.md b/website/docs/docs/deploy/dashboard-status-tiles.md similarity index 74% rename from website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-dashboard-status-tiles.md rename to website/docs/docs/deploy/dashboard-status-tiles.md index 9a90aeeb633..d6b4bc84a99 100644 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-dashboard-status-tiles.md +++ b/website/docs/docs/deploy/dashboard-status-tiles.md @@ -1,11 +1,11 @@ --- title: "Dashboard status tiles" -id: "cloud-dashboard-status-tiles" +id: "dashboard-status-tiles" description: "Embed Status Tiles in your dashboards to provide consumers with contextual information about the quality and freshness of data." --- ## Overview -In dbt Cloud, the [Metadata API](dbt-cloud/dbt-cloud-api/metadata/metadata-overview) can power Dashboard Status Tiles. A Dashboard Status Tile is placed on a dashboard (specifically: anywhere you can embed an iFrame) to give insight into the quality and freshness of the data feeding into that dashboard. This is done via dbt [exposures](building-a-dbt-project/exposures). +In dbt Cloud, the [Metadata API](/docs/dbt-cloud-apis/metadata-api) can power Dashboard Status Tiles. A Dashboard Status Tile is placed on a dashboard (specifically: anywhere you can embed an iFrame) to give insight into the quality and freshness of the data feeding into that dashboard. This is done via dbt [exposures](/docs/build/exposures). ## Functionality The dashboard status tile looks like this: @@ -19,13 +19,13 @@ The data freshness check fails if any sources feeding into the exposure are stal Clicking into **see details** from the Dashboard Status Tile takes you to a landing page where you can learn more about the specific sources, models, and tests feeding into this exposure. ## Setup -First, be sure to enable [source freshness](dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness) in the job that generates this exposure. +First, be sure to enable [source freshness](/docs/deploy/source-freshness) in the job that generates this exposure. In order to set up your dashboard status tile, here is what you need: -1. **Metadata Token.** You can learn how to set up a metadata only token [here](dbt-cloud/dbt-cloud-api/service-tokens). +1. **Metadata Token.** You can learn how to set up a metadata only token [here](/docs/dbt-cloud-apis/service-tokens). -2. **Exposure name.** You can learn more about how to set up exposures [here](building-a-dbt-project/exposures). +2. **Exposure name.** You can learn more about how to set up exposures [here](/docs/build/exposures). 3. **jobID.** Remember that you can select your jobId directly from the URL when looking at the relevant job in dbt Cloud. @@ -52,3 +52,12 @@ Looker does not allow you to directly embed HTML, and instead requires creating - Once you have set up your custom visualization, you can use it on any dashboard! You can configure it with the exposure name, jobID, and token relevant to that dashboard. + +### Tableau +Tableau does not require you to embed an iFrame. 
You only need to use a Web Page object on your Tableau Dashboard and a URL in the following format: + +``` +https://metadata.cloud.getdbt.com/exposure-tile?name=<exposure_name>&jobId=<job_id>&token=<metadata_token> +``` + + diff --git a/website/docs/docs/running-a-dbt-project/running-dbt-in-production.md b/website/docs/docs/deploy/deployment-overview.md similarity index 59% rename from website/docs/docs/running-a-dbt-project/running-dbt-in-production.md rename to website/docs/docs/deploy/deployment-overview.md index fd41b3bfb37..2bfbaf93e8c 100644 --- a/website/docs/docs/running-a-dbt-project/running-dbt-in-production.md +++ b/website/docs/docs/deploy/deployment-overview.md @@ -1,41 +1,47 @@ --- -title: "Running dbt in Production" -id: "running-dbt-in-production" +title: "About deployments" +id: "deployments" --- -## What does running dbt in production mean? -Running dbt in production simply means **setting up a system to run a dbt job on a schedule**, rather than running dbt commands manually from the command line. These production dbt jobs should create the tables and views that your business intelligence tools and end users query. Before continuing, make sure you understand dbt's approach to [managing environments](managing-environments). +Running dbt in production means setting up a system to run a _dbt job on a schedule_, rather than running dbt commands manually from the command line. Your production dbt jobs should create the tables and views that your business intelligence tools and end users query. Before continuing, make sure you understand dbt's approach to [managing environments](/docs/collaborate/environments). -:::info dbt commands in production +In addition to setting up a schedule, there are other considerations when setting up dbt to run in production: -We've written a guide for the dbt commands we run in production, over on [Discourse](https://discourse.getdbt.com/t/what-are-the-dbt-commands-you-run-in-your-production-deployment-of-dbt/366). - -::: - -As well as setting up a schedule, there are a number of other things you should consider when setting up dbt to run in production, such as: -* The complexity involved in creating a new dbt job, or editing an existing one. -* Setting up notifications if a step within your job returns an error code (e.g. a model cannot be built, or a test fails). +* The complexity involved in creating a new dbt job or editing an existing one. +* Setting up notifications if a step within your job returns an error code (for example, a model can't be built or a test fails). * Accessing logs to help debug any issues. -* Pulling the latest version of your git repo before running dbt (i.e. continuous deployment). -* Running your dbt project before merging code into master (i.e. continuous integration). +* Pulling the latest version of your git repo before running dbt (continuous deployment). +* Running your dbt project before merging code into master (continuous integration). * Allowing access for team members that need to collaborate on your dbt project. -## Ways to run dbt in production -### Using dbt Cloud -We've built [dbt Cloud](https://www.getdbt.com/signup/) from the ground up to empower data teams to easily run dbt in production. With dbt Cloud, you can: +## Run dbt in production + +If you want to run dbt jobs on a schedule, you can use tools such as dbt Cloud, Airflow, Prefect, Dagster, an automation server, or cron. + +## dbt Cloud + +We've built dbt Cloud to empower data teams to easily run dbt in production. 
If you're interested in trying out dbt Cloud, you can [sign up for an account](https://cloud.getdbt.com/signup/). + +dbt Cloud enables you to: - run your jobs on a schedule - view logs for any historical invocation of dbt - configure error notifications - render your project's documentation -If you're interested in giving dbt Cloud a spin, you can sign up for a *forever free* account [here](https://cloud.getdbt.com/signup/). +In general, the dbt Cloud application deployment models fall into two categories: **Multi Tenant** and **Single Tenant**. These deployments are hosted on infrastructure managed by dbt Labs. Both models leverage AWS infrastructure as described in the [Architecture](/docs/deploy/architecture) section. + +For more information on these deployment models, refer to: - +- [Multi Tenant](/docs/deploy/multi-tenant) +- [Single Tenant](/docs/deploy/single-tenant) + +If you’re interested in learning more about an Enterprise plan, please [contact us](mailto:sales@getdbt.com). + +## Airflow -### Using Airflow If your organization is using [Airflow](https://airflow.apache.org/), there are a number of ways you can run your dbt jobs, including: -* Installing the [dbt Cloud Provider](https://registry.astronomer.io/providers/dbt-cloud) to orchestrate dbt Cloud jobs. This package contains multiple Hooks, Operators, and Sensors to complete various actions within dbt Cloud. See an [example airflow DAG](https://registry.astronomer.io/dags/example-dbt-cloud) to get started! +* Installing the [dbt Cloud Provider](https://registry.astronomer.io/providers/dbt-cloud) to orchestrate dbt Cloud jobs. This package contains multiple Hooks, Operators, and Sensors to complete various actions within dbt Cloud. @@ -44,18 +50,25 @@ If your organization is using [Airflow](https://airflow.apache.org/), there are For more details on both of these methods, including example implementations, check out [this guide](https://www.astronomer.io/guides/airflow-dbt). -### Using Prefect +## Prefect + If your organization is using [Prefect](https://www.prefect.io), use the [DbtShellTask](https://docs.prefect.io/api/latest/tasks/dbt.html#dbtshelltask) to schedule, execute and monitor your dbt runs. Alternatively, you can use the supported [ShellTask](https://docs.prefect.io/api/latest/tasks/shell.html#shelltask) to execute dbt commands through the shell. You can also trigger dbt Cloud jobs with the [DbtCloudRunJob](https://docs.prefect.io/api/latest/tasks/dbt.html#dbtcloudrunjob) task. Running this task will generate a markdown artifact viewable in the Prefect UI. The artifact will contain links to the dbt artifacts generated as a result of the job run. -### Using Dagster +## Dagster + If your organization is using [Dagster](https://dagster.io/), you can use the [dagster_dbt](https://docs.dagster.io/_apidocs/libraries/dagster-dbt) library to integrate dbt commands into your pipelines. This library supports the execution of dbt through dbt Cloud, dbt CLI and the dbt RPC server. Running dbt from Dagster automatically aggregates metadata about your dbt runs. Check out the [example pipeline](https://dagster.io/blog/dagster-dbt) for details. -### Using an automation server +## Automation servers + Automation servers, like CodeDeploy, GitLab CI/CD ([video](https://youtu.be/-XBIIY2pFpc?t=1301)), Bamboo and Jenkins, can be used to schedule bash commands for dbt. They also provide a UI to view logging to the command line, and integrate with your git repository. 
-### Using cron +## Cron + Cron is a decent way to schedule bash commands. However, while it may seem like an easy route to schedule a job, writing code to take care of all of the additional features associated with a production deployment often makes this route more complex compared to other options listed here. + +## Related docs +- [What are the dbt commands you run in your production deployment of dbt?](https://discourse.getdbt.com/t/what-are-the-dbt-commands-you-run-in-your-production-deployment-of-dbt/366) diff --git a/website/docs/docs/deploy/job-notifications.md b/website/docs/docs/deploy/job-notifications.md new file mode 100644 index 00000000000..f6efb0223ed --- /dev/null +++ b/website/docs/docs/deploy/job-notifications.md @@ -0,0 +1,26 @@ +--- +title: "Job notifications" +id: "job-notifications" +description: "Set up notifications in dbt Cloud to receive Email or Slack alerts for job run status." +--- + +### Overview + +Setting up notifications in dbt Cloud will allow you to receive alerts via Email or a chosen Slack channel when a job run succeeds, fails, or is cancelled. + +### Email + +There are two options for setting up email notifications. As a **user**, you can set up email notifications for yourself under your Profile. As an **admin**, you can set up notifications on behalf of your team members. + +1. Click the gear in the top right and select **Notification settings**. + +2. **As a user:** Select **Edit** and select the type of Notification (Succeeds, Fails, or Is Cancelled) for each Job for which you would like to be notified, or + + **As an admin:** Select one or more users you'd like to set notifications for. If you only see your own name, then you might not have admin privileges. Select **Edit** and select the type of Notification (Succeeds, Fails, or Is Cancelled) for each Job for which they will be notified. + +3. Click **Save**. + + +### Slack + + diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule.md b/website/docs/docs/deploy/job-triggers.md similarity index 51% rename from website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule.md rename to website/docs/docs/deploy/job-triggers.md index 3c48a13f83a..bf6f6ac06d5 100644 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule.md +++ b/website/docs/docs/deploy/job-triggers.md @@ -1,13 +1,14 @@ --- -title: "Custom cron schedules" -id: "cloud-using-a-custom-cron-schedule" +title: "Job triggers" +id: "job-triggers" description: "You can use cron syntax to specify when you want to run a job." --- ### Overview + In dbt Cloud, you can use "cron" syntax to specify when you'd like your job to run. Cron syntax is very expressive, and allows you to completely customize your run schedule. -If you need help coming up with the right cron syntax, we recommend using a tool like crontab.guru. There, you can enter cron snippets and see what they mean in plain english. You can also find some example snippets below. +If you need help coming up with the right cron syntax, we recommend using a tool like `crontab.guru`. There, you can enter cron snippets and see what they mean in plain English. You can also find some example snippets below. ### Examples @@ -18,6 +19,13 @@ If you need help coming up with the right cron syntax, we recommend using a tool - `0 0 */2 * *`: At midnight UTC every other day - `0 0 * * 1`: At midnight UTC every Monday. -A custom cron schedule can be specified in the Triggers section of the Job Settings page. 
+A custom cron schedule can be specified in the Job Settings page when you edit a job: + +1. Select a job. +2. Click **Settings**. +3. Click **Edit**. +4. In the Triggers section, activate the **Run on schedule** option. +5. Select **Enter custom cron schedule**. +6. Enter the custom cron syntax for the schedule you want. - + diff --git a/website/docs/docs/dbt-cloud/deployments/multi-tenant.md b/website/docs/docs/deploy/multi-tenant.md similarity index 66% rename from website/docs/docs/dbt-cloud/deployments/multi-tenant.md rename to website/docs/docs/deploy/multi-tenant.md index 89a94c41418..94399ff4700 100644 --- a/website/docs/docs/dbt-cloud/deployments/multi-tenant.md +++ b/website/docs/docs/deploy/multi-tenant.md @@ -1,11 +1,11 @@ --- -id: multi-tenant-deployment -title: Multi Tenant (SaaS) +title: Multi tenant +id: multi-tenant --- The Multi Tenant (SaaS) deployment environment refers to the SaaS dbt Cloud application hosted by dbt Labs. This is the most commonly used deployment and is completely managed and maintained by dbt Labs, the makers of dbt. As a SaaS product, a user can quickly [create an account](https://www.getdbt.com/signup/) and get started using the product. The deployment is hosted in AWS and will always contain the latest software updates and bug fixes. For more information about the dbt Cloud Production deployment see the below. -- [Application Data Flows](/docs/dbt-cloud/deployments/deployment-architecture#application-data-flows) -- [Hosted Network Architecture](/docs/dbt-cloud/deployments/deployment-architecture#hosted-network-architecture) +- [Application Data Flows](/docs/deploy/architecture#application-data-flows) +- [Hosted Network Architecture](/docs/deploy/architecture#hosted-network-architecture) diff --git a/website/docs/docs/deploy/regions.md b/website/docs/docs/deploy/regions.md new file mode 100644 index 00000000000..004ed9c70aa --- /dev/null +++ b/website/docs/docs/deploy/regions.md @@ -0,0 +1,19 @@ +--- +title: "Regions" +id: "regions" +description: "Available regions" +--- + +dbt Cloud is hosted in multiple regions and will always connect to your data platform from the IP addresses below. Be sure to allow traffic from these IPs in your firewall, and include them in any database grants. + +[dbt Cloud Enterprise](https://www.getdbt.com/pricing/) plans can choose to have their account hosted in any of the regions below. Organizations **must** choose a single region per dbt Cloud account. If you need to run dbt Cloud in multiple regions, we recommend using multiple dbt Cloud accounts. + + +| Region | Location | Access URL | IP addresses | Developer plan | Team plan | Enterprise plan | +|--------|----------|------------|--------------|-----------------|------------|------------------| +| North America | us-east-1 (N. Virginia) | cloud.getdbt.com | 52.45.144.63
54.81.134.249
52.22.161.231 | ✅ | ✅ | ✅ | +| EMEA | eu-central-1 (Frankfurt) | emea.dbt.com | 3.123.45.39
3.126.140.248
3.72.153.148 | ❌ | ❌ | ✅ | +| Virtual Private dbt | Customized | Customized | Ask [Support](/guides/legacy/getting-help#dbt-cloud-support) for your IPs | ❌ | ❌ | ✅ | + + + diff --git a/website/docs/docs/dbt-cloud/deployments/single-tenant.md b/website/docs/docs/deploy/single-tenant.md similarity index 82% rename from website/docs/docs/dbt-cloud/deployments/single-tenant.md rename to website/docs/docs/deploy/single-tenant.md index 92945c50eaf..54cd764dadf 100644 --- a/website/docs/docs/dbt-cloud/deployments/single-tenant.md +++ b/website/docs/docs/deploy/single-tenant.md @@ -1,6 +1,6 @@ --- -id: single-tenant-deployment -title: Single Tenant +title: Single tenant +id: single-tenant --- The Single Tenant deployment environment provides a hosted alternative to the Multi Tenant (SaaS) dbt Cloud environment. While still managed and maintained by dbt Labs, the Single Tenant environment provides dedicated infrastructure with one or more instances of dbt Cloud that can only be accessed by a single customer. This is accomplished by spinning up all the necessary infrastructure with a re-usable Infrastructure as Code (IaC) deployment built with [Terraform](https://www.terraform.io/). The Single Tenant infrastructure lives in a dedicated AWS account and can be customized with certain configurations such as Firewall rules to limit ingress traffic or hosting in a specific AWS Region. @@ -13,5 +13,5 @@ _To learn more about setting up a dbt Cloud Single Tenant deployment, [please co For more information about the dbt Cloud Single Tenant deployment see the below. -- [Application Data Flows](/docs/dbt-cloud/deployments/deployment-architecture#application-data-flows) -- [Hosted Network Architecture](/docs/dbt-cloud/deployments/deployment-architecture#hosted-network-architecture) +- [Application Data Flows](/docs/deploy/architecture#application-data-flows) +- [Hosted Network Architecture](/docs/deploy/architecture#hosted-network-architecture) diff --git a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness.md b/website/docs/docs/deploy/source-freshness.md similarity index 80% rename from website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness.md rename to website/docs/docs/deploy/source-freshness.md index ca0073d30c4..5056e258eda 100644 --- a/website/docs/docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness.md +++ b/website/docs/docs/deploy/source-freshness.md @@ -1,22 +1,23 @@ --- title: "Source freshness" -id: "cloud-snapshotting-source-freshness" +id: "source-freshness" description: "Validate that data freshness meets expectations and alert if stale." --- ## Data Source Freshness -dbt Cloud provides a helpful interface around dbt's [source data freshness](using-sources#snapshotting-source-data-freshness) calculations. When a dbt Cloud job is configured to snapshot source data freshness, dbt Cloud will render a user interface showing you the state of the most recent snapshot. This interface is intended to help you determine if your source data freshness is meeting the SLAs that you've defined for your organization. +dbt Cloud provides a helpful interface around dbt's [source data freshness](/docs/build/sources#snapshotting-source-data-freshness) calculations. When a dbt Cloud job is configured to snapshot source data freshness, dbt Cloud will render a user interface showing you the state of the most recent snapshot. 
This interface is intended to help you determine if your source data freshness is meeting the SLAs that you've defined for your organization. - + ### Enabling source freshness snapshots -First, make sure to configure your sources to [snapshot freshness information](using-sources#snapshotting-source-data-freshness). +First, make sure to configure your sources to [snapshot freshness information](/docs/build/sources#snapshotting-source-data-freshness). - - **v0.21.0:** Renamed `dbt source snapshot-freshness` to `dbt source freshness`. If using an older version of dbt, the command is `snapshot-freshness`. + - **v0.21.0:** Renamed `dbt source snapshot-freshness` to `dbt source freshness`. If using an older version of dbt, the command is `snapshot-freshness`. + To have dbt Cloud display data source freshness as a rendered user interface, you will still need to use the pre-v0.21 syntax of `dbt source snapshot-freshness`. diff --git a/website/docs/docs/running-a-dbt-project/using-the-cli.md b/website/docs/docs/get-started/about-the-cli.md similarity index 79% rename from website/docs/docs/running-a-dbt-project/using-the-cli.md rename to website/docs/docs/get-started/about-the-cli.md index a12c50cde40..cc3639d933f 100644 --- a/website/docs/docs/running-a-dbt-project/using-the-cli.md +++ b/website/docs/docs/get-started/about-the-cli.md @@ -1,5 +1,6 @@ --- -title: "Using the CLI" +title: "About the CLI" +id: "about-the-cli" --- dbt ships with a Command Line Interface (CLI) for running your dbt project. This way of running a dbt project is free and open source. @@ -9,6 +10,8 @@ To use the CLI, your workflow generally looks like: * **Run your project from the command line:** * macOS ships with a default Terminal program, however you can also use iTerm or the command line prompt within a code editor to execute dbt commands +The CLI is also available for dbt Cloud. Additional components must be installed for the CLI to communicate via dbt Cloud APIs. For more information, visit the [dbt Cloud CLI GitHub repository](https://github.com/data-mie/dbt-cloud-cli) + :::info How we set up our computers for working on dbt projects We've written a [guide](https://discourse.getdbt.com/t/how-we-set-up-our-computers-for-working-on-dbt-projects/243) for our recommended setup when running dbt projects using the CLI. diff --git a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database.md b/website/docs/docs/get-started/connect-your-database.md similarity index 55% rename from website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database.md rename to website/docs/docs/get-started/connect-your-database.md index 2ef4726b44b..656288be68a 100644 --- a/website/docs/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database.md +++ b/website/docs/docs/get-started/connect-your-database.md @@ -1,44 +1,43 @@ --- -title: "Connecting your database" -id: "connecting-your-database" +title: "Connect your database" +id: "connect-your-database" --- +You can connect to your database in dbt Cloud by clicking the gear in the top right and then selecting **Account Settings**. From the Account Settings page, click **+ New Project**. + ## IP Restrictions -dbt Cloud will always connect to your warehouse from the following IP addresses. -Be sure to allow traffic from these IPs in your firewall, and include them in -any database grants. +dbt Cloud will always connect to your data platform from the IP addresses specified in the [Regions](/docs/deploy/regions) page. 
-- 52.45.144.63 -- 54.81.134.249 -- 52.22.161.231 +Be sure to allow traffic from these IPs in your firewall, and include them in any database grants. -- `54.81.134.249` and `52.22.161.231` were added in November, 2020 +- November 2020 — Added the IPs `54.81.134.249` and `52.22.161.231` +- September 2022 — Added EMEA IPs -Allowing these IP addresses only enables the connection to your data warehouse. However, you might want to send API requests from your restricted network to the dbt Cloud API. For example, you could use the API to send a POST request that [triggers a job to run](https://docs.getdbt.com/dbt-cloud/api-v2#operation/triggerRun). Using the dbt Cloud API requires that you allow the `cloud.getdbt.com` subdomain. For more on the dbt Cloud architecture, see "[Deployment architecture](deployment-architecture)." +Allowing these IP addresses only enables the connection to your data warehouse. However, you might want to send API requests from your restricted network to the dbt Cloud API. For example, you could use the API to send a POST request that [triggers a job to run](https://docs.getdbt.com/dbt-cloud/api-v2#operation/triggerRun). Using the dbt Cloud API requires that you allow the `cloud.getdbt.com` subdomain. For more on the dbt Cloud architecture, see [Deployment architecture](/docs/deploy/architecture). -## Connecting to Redshift and Postgres +## Connecting to Postgres, Redshift, and AlloyDB -The following fields are required when creating a Redshift connection: +The following fields are required when creating a Postgres, Redshift, or AlloyDB connection: | Field | Description | Examples | | ----- | ----------- | -------- | -| Host Name | The hostname of the Postgres or Redshift database to connect to. This can either be a hostname or an IP address. | `xxx.us-east-1.amazonaws.com` | +| Host Name | The hostname of the Postgres, Redshift, or AlloyDB database to connect to. This can either be a hostname or an IP address. | `xxx.us-east-1.amazonaws.com` | | Port | Usually 5432 (Postgres) or 5439 (Redshift) | `5439` | | Database | The logical database to connect to and run queries against. | `analytics` | - +**Note**: When you set up a Redshift or Postgres connection in dbt Cloud, SSL-related parameters aren't available as inputs. ### Connecting via an SSH Tunnel -To connect to a Postgres or Redshift instance via an SSH tunnel, check the "Use SSH Tunnel" option when creating your connection. When configuring the tunnel, you'll need to supply the hostname, username, and port for the bastion server. +To connect to a Postgres, Redshift, or AlloyDB instance via an SSH tunnel, select the **Use SSH Tunnel** option when creating your connection. When configuring the tunnel, you must supply the hostname, username, and port for the bastion server. Once the connection is saved, a public key will be generated and displayed for the Connection. You can copy this public key to the bastion server to authorize dbt Cloud to connect to your database via the bastion server. @@ -50,12 +49,13 @@ The following fields are required when creating a Snowflake connection: | Field | Description | Examples | | ----- | ----------- | -------- | -| Account | The Snowflake account to connect to. 
Take a look [here](/reference/warehouse-setups/snowflake-setup#account) to determine what the account field should look like based on your region.| | +| Role | A mandatory field indicating what role should be assumed after connecting to Snowflake | `transformer` | | Database | The logical database to connect to and run queries against. | `analytics` | | Warehouse | The virtual warehouse to use for running queries. | `transforming` | -![Snowflake connection details](/img/docs/dbt-cloud/snowflake-conn-details.png) + +**Note:** A crucial part of working with dbt atop Snowflake is ensuring that users (in development environments) and/or service accounts (in deployment to production environments) have the correct permissions to take actions on Snowflake! Here is documentation of some [example permissions to configure Snowflake access](snowflake-permissions). ### Username / Password @@ -66,7 +66,8 @@ Development or Deployment credentials in a dbt project. Simply enter your Snowfl username (specifically, the `login_name`) and the corresponding user's Snowflake `password` to authenticate dbt Cloud to run queries against Snowflake on behalf of a Snowflake user. -![Snowflake username/password auth](/img/docs/dbt-cloud/snowflake-userpass-auth.png) +**Note**: The schema field in the **Developer Credentials** section is a required field. + ### Key Pair **Available in:** Development environments, Deployment environments @@ -87,25 +88,22 @@ using a key pair. In order to successfully fill in the Private Key field, you **must** include the commented lines below when you add the passphrase. Leaving the `PRIVATE KEY PASSPHRASE` field empty will return an error - have a look at the examples below: - **Example:** ```sql -----BEGIN ENCRYPTED PRIVATE KEY----- < encrypted private key contents here > -----END ENCRYPTED PRIVATE KEY----- ``` - -![Snowflake keypair auth](/img/docs/dbt-cloud/snowflake-keypair-auth.png) + ### Snowflake OAuth + **Available in:** Development environments, Enterprise plans only The OAuth auth method permits dbt Cloud to run development queries on behalf of a Snowflake user without the configuration of Snowflake password in dbt Cloud. For -more information on configuring a Snowflake OAuth connection in dbt Cloud, please see -[the docs on setting up Snowflake OAuth](setting-up-enterprise-snowflake-oauth). - -![Configuring Snowflake OAuth for a connection](/img/docs/dbt-cloud/dbt-cloud-enterprise/1bd0c42-Screen_Shot_2020-03-10_at_6.20.05_PM.png) +more information on configuring a Snowflake OAuth connection in dbt Cloud, please see [the docs on setting up Snowflake OAuth](/docs/collaborate/manage-access/set-up-snowflake-oauth). + ## Connecting to BigQuery @@ -113,7 +111,7 @@ more information on configuring a Snowflake OAuth connection in dbt Cloud, pleas :::info Uploading a service account JSON keyfile -While the fields in a BigQuery connection can be specified manually, we recommend uploading a service account JSON keyfile to quickly and accurately configure a connection to BigQuery. +While the fields in a BigQuery connection can be specified manually, we recommend uploading a service account keyfile to quickly and accurately configure a connection to BigQuery. ::: @@ -145,19 +143,46 @@ In addition to these fields, there are two other optional fields that can be con The OAuth auth method permits dbt Cloud to run development queries on behalf of a BigQuery user without the configuration of BigQuery service account keyfile in dbt Cloud. 
For more information on the initial configuration of a BigQuery OAuth connection in dbt Cloud, please see -[the docs on setting up BigQuery OAuth](cloud-setting-up-bigquery-oauth). +[the docs on setting up BigQuery OAuth](/docs/collaborate/manage-access/set-up-bigquery-oauth). As an end user, if your organization has set up BigQuery OAuth, you can link a project with your personal BigQuery account in your personal Profile in dbt Cloud, like so: - ## Connecting to Databricks +You can connect to Databricks by using one of two supported adapters: [dbt-databricks](/connect-your-database#dbt-databricks) and [dbt-spark](/connect-your-database#dbt-spark). For accounts on dbt 1.0 or later, we recommend using the dbt-databricks adapter. The dbt-databricks adapter is maintained by the Databricks team and is verified by dbt Labs. The Databricks team is committed to supporting and improving the adapter over time, so you can be sure the integrated experience will provide the best of dbt and the best of Databricks. Connecting to Databricks via dbt-spark will be deprecated in the future. + +### dbt-databricks Adapter +dbt-databricks is compatible with the following versions of dbt Core in dbt Cloud with varying degrees of functionality. -### ODBC +| Feature | dbt Versions | | ----- | ----------- | | dbt-databricks | Available starting with dbt 1.0 in dbt Cloud| | Unity Catalog | Available starting with dbt 1.1 | | Python models | Available starting with dbt 1.3 | + +The dbt-databricks adapter offers: +- **Easier setup** +- **Better defaults:** +The dbt-databricks adapter is more opinionated, guiding users to an improved experience with less effort. Design choices of this adapter include defaulting to Delta format, using merge for incremental models, and running expensive queries with Photon. +- **Support for Unity Catalog:** +Unity Catalog allows Databricks users to centrally manage all data assets, simplifying access management and improving search and query performance. Databricks users can now get three-part data hierarchies – catalog, schema, model name – which solves a longstanding friction point in data organization and governance. + + +To set up the Databricks connection, supply the following fields: + +| Field | Description | Examples | | ----- | ----------- | -------- | | Server Hostname | The hostname of the Databricks account to connect to | dbc-a2c61234-1234.cloud.databricks.com | | HTTP Path | The HTTP path of the Databricks cluster or SQL warehouse | /sql/1.0/warehouses/1a23b4596cd7e8fg | | Catalog | Name of Databricks Catalog (optional) | Production | + + + +### dbt-spark Adapter dbt Cloud supports connecting to Databricks using 
@@ -166,14 +191,14 @@ The following fields are available when creating a Databricks connection: | Field | Description | Examples | | ----- | ----------- | -------- | -| Host Name | The hostname of the Databricks account to connect to | `avc-def1234ghi-9999.cloud.databricks.com` | +| Hostname | The hostname of the Databricks account to connect to | dbc-a2c61234-1234.cloud.databricks.com | | Port | The port to connect to Databricks for this connection | 443 | -| Organization | Optional (default: 0) | 0123456789 | +| Organization | Optional (default: 0) | 1123456677899012 | | Cluster | The ID of the cluster to connect to (required if using a cluster) | 1234-567890-abc12345 | -| Endpoint | The ID of the endpoint to connect to (required if using Databricks SQL) | 0123456789 | +| Endpoint | The ID of the endpoint to connect to (required if using Databricks SQL) | 1a23b4596cd7e8fg | | User | Optional | dbt_cloud_user | - + ## Connecting to Apache Spark diff --git a/website/docs/dbt-cli/configure-your-profile.md b/website/docs/docs/get-started/connection-profiles.md similarity index 77% rename from website/docs/dbt-cli/configure-your-profile.md rename to website/docs/docs/get-started/connection-profiles.md index 4b5958026ca..b1614a0deeb 100644 --- a/website/docs/dbt-cli/configure-your-profile.md +++ b/website/docs/docs/get-started/connection-profiles.md @@ -1,5 +1,6 @@ --- -title: "Configuring your profile" +title: "Connection profiles" +id: "connection-profiles" description: "Configure your profile using the command line." --- @@ -9,7 +10,7 @@ description: "Configure your profile using the command line." ## Connecting to your warehouse using the command line -When you invoke dbt from the command line, dbt parses your `dbt_project.yml` and obtains the `profile` name, which dbt needs to connect to your data warehouse. +When you invoke dbt from the command line, dbt parses your `dbt_project.yml` and obtains the `profile` name, which dbt needs to connect to your . @@ -24,7 +25,18 @@ profile: 'jaffle_shop' dbt then checks your `profiles.yml` file for a profile with the same name. A profile contains all the details required to connect to your data warehouse. -This file generally lives outside of your dbt project to avoid sensitive credentials being check in to version control. By default, dbt expects the `profiles.yml` file to be located in the `~/.dbt/` directory. + + +By default, dbt expects the `profiles.yml` file to be located in the `~/.dbt/` directory. + + + + +dbt will search the current working directory for the `profiles.yml` file and will default to the `~/.dbt/` directory if not found. + + + +This file generally lives outside of your dbt project to avoid sensitive credentials being checked in to version control, but `profiles.yml` can be safely checked in when [using environment variables](#advanced-using-environment-variables) to load sensitive credentials. @@ -59,7 +71,7 @@ A profile consists of _targets_, and a specified _default target_. Each _target_ specifies the type of warehouse you are connecting to, the credentials to connect to the warehouse, and some dbt-specific configurations. -The credentials you need to provide in your target varies across warehouses – sample profiles for each supported warehouse are available in the [Supported Adapters](available-adapters) section. +The credentials you need to provide in your target varies across warehouses &mdash sample profiles for each supported warehouse are available in the [Supported Data Platforms](supported-data-platforms) section. 
**Pro Tip:** You may need to surround your password in quotes if it contains special characters. More details [here](https://stackoverflow.com/a/37015689/10415173). @@ -85,11 +97,11 @@ Use the [debug](debug) command to check whether you can successfully connect to ## Understanding targets in profiles -dbt supports multiple targets within one profile to encourage the use of separate development and production environments as discussed in [Managing Environments](managing-environments). +dbt supports multiple targets within one profile to encourage the use of separate development and production environments as discussed in [Managing Environments](/docs/collaborate/environments). A typical profile for an analyst using dbt locally will have a target named `dev`, and have this set as the default. -You may also have a `prod` target within your profile, which creates the objects in your production schema. However, since it's often desirable to perform production runs on a schedule, we recommend deploying your dbt project on a separate machine to your local machine. As such, most dbt users will only have a dev target in their profile on their local machine. +You may also have a `prod` target within your profile, which creates the objects in your production schema. However, since it's often desirable to perform production runs on a schedule, we recommend deploying your dbt project to a separate machine other than your local machine. Most dbt users only have a `dev` target in their profile on their local machine. If you do have multiple targets in your profile, and want to use a target other than the default, you can do this using the `--target` option when issuing a dbt command. @@ -125,7 +137,7 @@ In development, a pattern we’ve found to work well is to name the schema in yo Note that there’s no need to create your target schema beforehand – dbt will check if the schema already exists when it runs, and create it if it doesn’t. -While the target schema represents the default schema that dbt will use, it may make sense to split your models into separate schemas, which can be done by using [custom schemas](using-custom-schemas). +While the target schema represents the default schema that dbt will use, it may make sense to split your models into separate schemas, which can be done by using [custom schemas](/docs/build/custom-schemas). ## Understanding threads @@ -143,7 +155,25 @@ You can use a different number of threads than the value defined in your target ## Advanced: Customizing a profile directory -By default, dbt expects your `profiles.yml` file to be located in the `~/.dbt/` directory. To check the expected location of your `profiles.yml` file for your installation of dbt, you can run the following: +The parent directory for `profiles.yml` is determined using the following precedence: + + + +1. `--profiles-dir` option +1. `DBT_PROFILES_DIR` environment variable +1. `~/.dbt/` directory + + + + +1. `--profiles-dir` option +1. `DBT_PROFILES_DIR` environment variable +1. current working directory +1. 
`~/.dbt/` directory + + + +To check the expected location of your `profiles.yml` file for your installation of dbt, you can run the following: ```bash $ dbt debug --config-dir @@ -152,13 +182,13 @@ To view your profiles.yml file, run: open /Users/alice/.dbt ``` -You may want to have your `profiles.yml` file stored in a different directory – for example, if you are using environment variables to load your credentials, you might choose to include this file in your version controlled dbt project, and direct dbt to load the file from there. +You may want to have your `profiles.yml` file stored in a different directory than `~/.dbt/` – for example, if you are [using environment variables](#advanced-using-environment-variables) to load your credentials, you might choose to include this file in the root directory of your dbt project. Note that the file always needs to be called `profiles.yml`, regardless of which directory it is in. -There are two ways to direct dbt to a different location for your `profiles.yml` file: +There are multiple ways to direct dbt to a different location for your `profiles.yml` file: -**1. Use the `--profiles-dir` option when executing a dbt command** +### 1. Use the `--profiles-dir` option when executing a dbt command This option can be used as follows: ``` @@ -167,13 +197,12 @@ $ dbt run --profiles-dir path/to/directory If using this method, the `--profiles-dir` option needs to be provided every time you run a dbt command. -**2. Use the `DBT_PROFILES_DIR` environment variable to change the default location** +### 2. Use the `DBT_PROFILES_DIR` environment variable to change the default location Specifying this environment variable overrides the directory that dbt looks for your `profiles.yml` file in. You can specify this by running: ``` $ export DBT_PROFILES_DIR=path/to/directory ``` -If the `--profiles-dir` option is used in a dbt command, it will take precedence over this environment variable. ## Advanced: Using environment variables -Credentials can either be placed directly into the `profiles.yml` file, or they can be loaded from environment variables. This is especially useful for production deployments of dbt. You can find more information about using environment variables [here](env_var). +Credentials can be placed directly into the `profiles.yml` file or loaded from environment variables. Using environment variables is especially useful for production deployments of dbt. You can find more information about environment variables [here](env_var). diff --git a/website/docs/docs/get-started/dbt-cloud-features.md b/website/docs/docs/get-started/dbt-cloud-features.md new file mode 100644 index 00000000000..c5963e53c4f --- /dev/null +++ b/website/docs/docs/get-started/dbt-cloud-features.md @@ -0,0 +1,47 @@ +--- +title: "dbt Cloud features" +id: "dbt-cloud-features" +--- + + +## dbt Cloud IDE + + + +The dbt Cloud [integrated development environment (IDE)](/docs/get-started/develop-in-the-cloud) allows you to build, test, run, and version control your dbt projects directly from your browser. The IDE is the fastest and most reliable way to deploy dbt, and provides a real-time editing and execution environment for your dbt project — no command line required. Anyone can use the IDE, from new dbt developers to seasoned practitioners. 
+ +With the Cloud IDE, you can: + +- Write modular SQL models with `select` statements and the [ref()](/reference/dbt-jinja-functions/ref) function +- Compile dbt code into SQL and execute it against your database directly +- Test every model before deploying it to production +- Generate and view documentation of your dbt project +- Leverage [git and version-control your code](/docs/collaborate/git/version-control-basics) from your browser with a couple of clicks +- Create and test Python models: + * You can `compile` Python models to see the full function that gets executed in your data platform + * You can see Python models in the DAG in dbt version 1.3 and higher + * You can't currently preview Python models +- Visualize a directed acyclic graph (DAG), [and more](/docs/get-started/dbt-cloud-tips) + +## IDE features +The dbt Cloud IDE comes with a range of features, including better performance and exciting enhancements, that make it easier for you to develop, build, compile, run, and test data models. Check out some of the features below to learn more: + + | Feature | Info | |---|---| | **File state indicators** | Ability to see when changes or actions have been made to the file. The indicators **M, U,** and **•** appear to the right of your file or folder name and indicate the actions performed:

- Unsaved **(•)** — The IDE detects unsaved changes to your file/folder
- Modification **(M)** — The IDE detects a modification of existing files/folders
- Untracked **(U)** — The IDE detects changes made to new files or renamed files +| **Build, test, and run code** | Build, test, and run your project with a button click or by using the Cloud IDE command bar. +| **Drag and drop** | Drag and drop files located in the file explorer, and use the file breadcrumb on the top of the IDE for quick, linear navigation. Access adjacent files in the same folder by right-clicking on the breadcrumb file. +| **Organize tabs** | You can:
- Move your tabs around to reorganize your work in the IDE
- Right-click on a tab to view and select a list of actions to take
- Close multiple, unsaved tabs to batch save your work +| **Multiple selections** | You can make multiple selections for small and simultaneous edits. The below commands are a common way to add more cursors and allow you to insert cursors below or above with ease.

- Option-Command-Down arrow
- Option-Command-Up arrow
- Press Option and click on an area +| **Formatting** | Format your files with a click of a button, powered by [sqlfmt](http://sqlfmt.com/). +| **Git diff view** | Ability to see what has been changed in a file before you make a pull request. +| **dbt autocomplete** | There are four new types of autocomplete features to help you develop faster:
- Use `ref` to autocomplete your model names
- Use `source` to autocomplete your source name + table name
- Use `macro` to autocomplete your arguments
- Use `env var` to autocomplete env var +| **Dark mode** | Use dark mode in the Cloud IDE for a great viewing experience in low-light environments. + + +## Related docs + +- [dbt Cloud tips](/docs/get-started/dbt-cloud-tips) +- [Develop in the Cloud](docs/get-started/develop-in-the-cloud) +- [Guides](/docs/get-started/getting-started/overview) diff --git a/website/docs/docs/get-started/dbt-cloud-tips.md b/website/docs/docs/get-started/dbt-cloud-tips.md new file mode 100644 index 00000000000..687c6c97643 --- /dev/null +++ b/website/docs/docs/get-started/dbt-cloud-tips.md @@ -0,0 +1,95 @@ +--- +title: "dbt Cloud tips" +id: dbt-cloud-tips +description: "Check out any dbt Cloud and IDE-related tips." +sidebar_label: "dbt Cloud tips" +--- + +# dbt Cloud tips + +The Cloud IDE provides keyboard shortcuts, features, and development tips to help you work faster and be more productive. Use this Cloud IDE cheat sheet to help you quickly reference some common operations. + +## Most popular + +**Organize tabs** + +You can move your tabs around to reorganize your work in the IDE. You can also right-click on a tab to close multiple tabs or view and select a list of actions to take. Right-click in the editor to access other options, including text wrap. + +**Drag and drop** + +You can also drag and drop files in the file tree or in the editor. Use the file breadcrumb on the top of the IDE for quicker navigation. You can access adjacent files in the same file by right-clicking on the breadcrumb file. + +**Search across files** + +You can quickly search over all files in the IDE on your current project. To search, open the search bar by pressing Command-O to find text across all files in your current project and write your filename. You can view the results under the search text, which are grouped into files containing the match. You can click on the results to view them in the IDE. + +**Command bar and status** + +You can run commands from the command bar at the bottom of the IDE or by using the **Build** button. Use the [rich model selection syntax](/docs/reference/node-selection/syntax) to run [dbt commands](/docs/reference/dbt-commands) directly within dbt Cloud. You can also view the history, status, and logs of previous runs by clicking **Runs**. + +The status icon on the lower right corner of the IDE gives you an indicator of the health of your project. You can identify errors by clicking on the status icon for more details or by clicking **Restart the IDE**. + +**Find and replace** + +Press Command-F or Control-F to open the find-and-replace bar in the upper right corner of the current file in the IDE. The IDE highlights your search results in the current file and code outline. You can use the up and down arrows to see the match highlighted in the current file when there are multiple matches. To replace the text with something else, use the left arrow. + +**DAG in the IDE** + +Double-click a node in the directed acyclic graph (DAG) to open that file in a new tab. Click **Expand** on the upper right side of the DAG and use node selection syntax (`select` or `exclude`) to view a subset of your DAG. + +## IDE Keyboard shortcuts + +There are default keyboard shortcuts that can help make development more productive and easier for everyone. + +- Command-O or Control-O to select a file to open +- Command-P or Control-P to see command palette +- Hold Option-click-on-an-area to select multiple lines and perform a multi-edit. You can also press Command-E to perform this operation on the command line. 
+- Press Fn-F1 to view a list of the other editor shortcuts +- Command-Enter or Control-Enter to Preview your code +- Command-Shift-Enter or Control-Shift-Enter to Compile +- Highlight a portion of code and use the above shortcuts to Preview or Compile code +- Enter two underscores (__) in the IDE to reveal a list of dbt functions + +## Multiple selections + +You can make multiple selections for small and simultaneous edits. The below commands are a common way to add more cursors and allow you to insert cursors below or above with ease. + +- Option-Command-Down arrow +- Option-Command-Up arrow +- Press Option and click on an area + +## Package tips + +- Use the [dbt_codegen](https://hub.getdbt.com/dbt-labs/codegen/latest/) package to help you generate YML files for your models and sources and SQL files for your staging models. +- The [dbt_utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/) package contains macros useful for daily development. For example, `date_spine` generates a table with all dates between the ones provided as parameters. +- The [dbt_project_evaluator](https://hub.getdbt.com/dbt-labs/dbt_project_evaluator/latest) package compares your dbt project against a list of our best practices and provides suggestions and guidelines on how to update your models. +- The [dbt_expectations](https://hub.getdbt.com/calogica/dbt_expectations/latest) package contains many tests beyond those built into dbt Core. +- The [dbt_audit_helper](https://hub.getdbt.com/#:~:text=adwords-,audit_helper,-codegen) package lets you compare the output of 2 queries. Use it when refactoring existing logic to ensure that the new results are identical. +- The [dbt_artifacts](https://hub.getdbt.com/brooklyn-data/dbt_artifacts/latest) package saves information about your dbt runs directly to your data platform so that you can track the performance of models over time. +- The [dbt_meta_testing](https://hub.getdbt.com/tnightengale/dbt_meta_testing/latest) package checks that your dbt project is sufficiently tested and documented. + +## Advanced tips + +- Use your folder structure as your primary selector method. `dbt build --select marts.marketing` is simpler and more resilient than relying on tagging every model. +- Think about jobs in terms of build cadences and SLAs. Run models that have hourly, daily, or weekly build cadences together. +- Use the [where config](/docs/reference/resource-configs/where) for tests to test an assertion on a subset of records. +- [store_failures](/docs/reference/resource-configs/store_failures) lets you examine records that cause tests to fail, so you can either repair the data or change the test as needed. +- Use [severity](/docs/reference/resource-configs/severity) thresholds to set an acceptable number of failures for a test. +- Use [incremental_strategy](/docs/build/incremental-models#about-incremental_strategy) in your incremental model config to implement the most effective behavior depending on the volume of your data and reliability of your unique keys. +- Set `vars` in your `dbt_project.yml` to define global defaults for certain conditions, which you can then override using the `--vars` flag in your commands. +- Use [for loops](/docs/get-started/learning-more/using-jinja#use-a-for-loop-in-models-for-repeated-sql) in Jinja to [DRY](https://docs.getdbt.com/terms/dry) up repetitive logic, such as selecting a series of columns that all require the same transformations and naming patterns to be applied. 
+- Instead of relying on post-hooks, use the [grants config](/docs/reference/resource-configs/grants) to apply permission grants in the warehouse resiliently. +- Define [source-freshness](/docs/build/sources#snapshotting-source-data-freshness) thresholds on your sources to avoid running transformations on data that has already been processed. +- Use the `+` operator on the left of a model `dbt build --select +model_name` to run a model and all of its upstream dependencies. Use the `+` operator on the right of the model `dbt build --select model_name+` to run a model and everything downstream that depends on it. +- Use `dir_name` to run all models in a package or directory. +- Use the `@` operator on the left of a model in a non-state-aware CI setup to test it. This operator runs all of a selection’s parents and children, and also runs the parents of its children, which in a fresh CI schema will likely not exist yet. +- Use the [--exclude flag](/docs/reference/node-selection/exclude) to remove a subset of models out of a selection. +- Use [state and deferral](/docs/deploy/cloud-ci-job#deferral-and-state-comparison) to create a slim CI setup. +- Use the [--full-refresh](/docs/reference/commands/run#refresh-incremental-models) flag to rebuild an incremental model from scratch. +- Use [seeds](/docs/build/seeds) to create manual lookup tables, like zip codes to states or marketing UTMs to campaigns. `dbt seed` will build these from CSVs into your warehouse and make them `ref` able in your models. +- Use [target.name](/docs/build/custom-schemas#an-alternative-pattern-for-generating-schema-names) to pivot logic based on what environment you’re using. For example, to build into a single development schema while developing, but use multiple schemas in production. + +## Related docs + +- [Getting started](/docs/get-started/getting-started/overview) +- [Develop in the Cloud](/docs/get-started/develop-in-the-cloud) diff --git a/website/docs/docs/get-started/develop-in-the-cloud.md b/website/docs/docs/get-started/develop-in-the-cloud.md new file mode 100644 index 00000000000..a29e18082ed --- /dev/null +++ b/website/docs/docs/get-started/develop-in-the-cloud.md @@ -0,0 +1,184 @@ +--- +title: "Develop in the Cloud" +id: develop-in-the-cloud +description: "Develop, test, run, and build in the Cloud IDE." +sidebar_label: Develop in the Cloud +--- + + + +The dbt Cloud integrated development environment (IDE) is a single interface for building, testing, running, and version-controlling dbt projects from your browser. With the Cloud IDE, you can compile dbt code into SQL and run it against your database directly. + +The IDE leverages the open-source [dbt-rpc](/reference/commands/rpc) plugin to recompile only the changes made in your project. + +## Prerequisites + +To develop in the Cloud IDE, make sure you have the following: + +- Your dbt project must be compatible with dbt version 0.15.0 or higher. The dbt IDE is powered by the [dbt-rpc](/reference/commands/rpc) which was overhauled in dbt v0.15.0 +- You must have a [dbt Cloud account](https://cloud.getdbt.com/) and [Developer seat license](/docs/collaborate/manage-access/seats-and-users) +- You must have a git repository set up and your git provider must have `write` access enabled. 
See [Connecting your GitHub Account](/docs/collaborate/git/connect-github) and [Importing a project by git URL](/docs/collaborate/git/import-a-project-by-git-url) for detailed setup instructions +- Your dbt project must be connected to a [data platform](/docs/get-started/connect-your-database) +- You must have a [**development environment** and **development credentials**](#set-up-and-access-the-cloud-ide) set up +- The environment must be on dbt version 1.0 or higher + +### Start up and work retention in the IDE + + + +
+**Start up process**
+
+There are three start-up states when using or launching the Cloud IDE:
+
+- Creation start — This is the state where you are starting the IDE for the first time. You can also view this as a *cold start* (see below), and you can expect this state to take longer because the git repository is being cloned.
+- Cold start — This is the process of starting a new develop session, which will be available to you for three hours. The environment automatically turns off three hours after the last activity with the rpc server. This includes compile, preview, or any dbt invocation; however, it *does not* include editing and saving a file.
+- Hot start — This is the state of resuming an existing or active develop session within three hours of the last activity.
+
+**Work retention**
+
+The Cloud IDE needs explicit action to save your changes. There are three ways your work is stored:
+
+- Unsaved, local code — Any code you write is automatically available from your browser’s storage. You can see your changes but will lose them if you switch branches or browsers (another device or browser).
+- Saved but uncommitted code — When you save a file, the data gets stored in your local storage (EFS storage). If you switch branches but don’t *commit* your saved changes, you will lose your changes.
+- Committed code — This is stored in the branch with your git provider, and you can check out other (remote) branches.
+
+## Set up and access the Cloud IDE
+
+:::info 📌
+
+New to dbt? Check out our [Getting Started guide](/docs/get-started/getting-started/overview) to build your first dbt project in the Cloud IDE!
+
+:::
+
+To start using the Cloud IDE, you first need to set up your **Development environment** and **Development credentials**.
+
+If you’re new to dbt, these are set up automatically during project setup. However, if you have an existing dbt Cloud account, you may need to create a development environment and credentials manually to use the Cloud IDE.
+
+Review the steps below to set up your development environment and credentials:
+
+**Development environment**
+
+1. To create a development environment, choose **Deploy** and then **Environments** from the top left. Click **Create Environment**.
+
+2. Enter an environment name that will help you identify it among your other environments (for example, `Nate's Development Environment`).
+3. Choose **Development** as the **Environment Type**.
+4. You can also select which **dbt Version** to use at this time. For compatibility reasons, we recommend that you select the same dbt version that you plan to use in your deployment environment.
+5. Click **Save** to finish creating your **Development environment**.
+
+**Developer credentials**
+
+The IDE uses developer credentials to connect to your data platform. These developer credentials should be specific to your user, and they should *not* be super user credentials or the same credentials that you use for your production deployment of dbt.
+
+Follow the steps below to set up your developer credentials:
+
+1. Go to the [**Credentials**](https://cloud.getdbt.com/next/settings/profile#credentials) section.
+
+2. Select the relevant project in the list.
+
+3. Click **Edit** on the bottom right of the page.
+
+4. Enter your developer credentials and then click **Save**.
+
+Great job! You should now be able to access the Cloud IDE by clicking **Develop** in the navigation.
+
+### Access the Cloud IDE
+
+Now that you've set up your development environment and credentials, you should be able to access the Cloud IDE:
+
+1. Log in with your [dbt Cloud account](https://cloud.getdbt.com/). If you don't have one, [sign up](https://www.getdbt.com/signup/) for an account for free.
+2. Click **Develop** at the top of the page.
+3. Make sure you've already initialized your project.
+4. Start developing and use the image and guide below to familiarize yourself with the Cloud IDE and its [features](/docs/get-started/dbt-cloud-features#ide-features):
+
+| Number | Feature | Info |
+|---|---|---|
+| 1. | File Tree | The file tree allows you to organize your project and manage your files and folders. Click the three-dot menu associated with the file or folder to create, rename, or delete it. Note: This function is unavailable if you’re on the **main** branch. |
+| 2. | Editor | This is where you edit your files. You can use the tab for each editor to position it exactly where you need it. |
+| 3. | IDE git button | The git button in the IDE allows you to apply the concept of [version control](/docs/collaborate/git/version-control-basics) to your project, and you can execute git commands directly in the IDE. |
+| 4. | Command bar | You can enter and run commands from the command bar at the bottom of the IDE. Use the [rich model selection syntax](/reference/node-selection/syntax) to execute [dbt commands](/reference/dbt-commands) directly within dbt Cloud. You can also view the history, status, and logs of previous runs by clicking **History** on the left of the bar. |
+| 5. | Status bar | This area provides you with useful information about your IDE and project status. You also have additional options like restarting or [recloning your repo](/docs/collaborate/git/version-control-basics). |
+| 6. | Preview, Compile, and Build | This is where you can preview, compile, or build your dbt project, as well as see the results and the DAG. |
+| 7. | Lineage tab | You can see how models are used as building blocks from left to right to transform your data from raw sources into cleaned-up modular derived pieces and final outputs on the far right of the DAG. You can access files in the **Lineage** tab by double-clicking on a particular model. Expand the DAG to fullscreen to view it differently. Note: The default view is `+model+`, however, you can change it to `2+model+2`. |
+| 8. | Change branches and view documentation | Change branches in fewer clicks and focus on developing. You can generate and view [documentation](/docs/collaborate/build-and-view-your-docs) for your dbt project in real time, and inspect and verify what your project's documentation will look like before you deploy your changes to production. |
+| 9. | File state indicators | The file state indicators track any action or change made in your project. The indicators **M, U, and •** appear to the right of your file or folder name, and also under the **Changes** section. |
+| 10. | Format button | This is where you can format your dbt project code. The new **Format** button formats your file and is powered by [sqlfmt](http://sqlfmt.com/). |
+
+## Build, compile, and run projects
+
+You can *build*, *compile*, *run*, and *test* dbt projects by using the command bar. The Cloud IDE will update in real time when you run models, tests, seeds, and operations.
+
+If a model or test fails, you can review the logs to find and fix the issue.
+
+You can also use dbt's [rich model selection syntax](/reference/node-selection/syntax) to [run dbt commands](/reference/dbt-commands) directly within dbt Cloud.
+
+## Build and view your project's docs
+
+The dbt Cloud IDE makes it possible to view documentation for your dbt project while your code is still in development. With this workflow, you can inspect and verify what your project's generated documentation will look like before your changes are released to production.
+
+To generate your project’s documentation (docs) in the IDE, run `dbt docs generate` in the command bar. This command generates the docs for your dbt project as it currently exists in development.
+
+After you generate a successful run, you can view your documentation for your dbt project in real time by clicking **View Docs** or the book icon above the file tree.
+
+You can view the latest version of your documentation rendered in a new browser window, and inspect and verify what your project's documentation will look like before you deploy your changes to production.
+
+## Related docs
+
+- [What is dbt?](/docs/introduction#dbt-features)
+- [dbt Learn courses](https://courses.getdbt.com/collections)
+- [dbt Cloud features](/docs/get-started/dbt-cloud-features)
+- [Version control basics](/docs/collaborate/git/version-control-basics)
+- [dbt Commands](/reference/dbt-commands)
+
+## Related questions
+
+<details>
+  <summary>Is there a cost to using the Cloud IDE?</summary>
+  <div>
+    Not at all! You can use dbt Cloud when you sign up for the Free Developer plan, which comes with one developer seat. If you’d like to access more features or have more developer seats, you can upgrade your account to the Team or Enterprise plan. See dbt pricing plans for more details.
+  </div>
+</details>
+
+<details>
+  <summary>Can I be a contributor to dbt Cloud?</summary>
+  <div>
+    Anyone can contribute to the dbt project. And whether it's a dbt package, a plugin, dbt-core, or this documentation site, contributing to the open source code that supports the dbt ecosystem is a great way to level yourself up as a developer, and give back to the community. See Contributing for details on what to expect when contributing to the dbt open source software (OSS).
+  </div>
+</details>
+<details>
+  <summary>What is the difference between developing on the Cloud IDE and on the CLI?</summary>
+  <div>
+    There are two main ways to develop with dbt: using the web-based IDE in dbt Cloud or using the command-line interface (CLI) in dbt Core:
+
+    - **dbt Cloud IDE:** dbt Cloud is a web-based application that allows you to develop dbt projects with the IDE, includes a purpose-built scheduler, and provides an easier way to share your dbt documentation with your team. The IDE is a faster and more reliable way to deploy your dbt models and provides a real-time editing and execution environment for your dbt project.
+    - **dbt Core CLI:** The command line interface (CLI) uses dbt Core, open-source software that’s freely available. You can build your dbt project in a code editor, like JetBrains or VSCode, and run dbt commands from the command line.
+  </div>
+</details>
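+
+For the CLI path, here is a minimal sketch of the workflow, using a hypothetical `customers` model (your project and model names will differ):
+
+```shell
+# From inside your dbt project directory:
+dbt run                        # build every model in the project
+dbt build --select +customers  # build one model plus its upstream parents
+```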
+<details>
+  <summary>What type of support is provided with dbt Cloud?</summary>
+  <div>
+    The global dbt Support team is available to help dbt Cloud users by email or in-product live chat. Developer and Team accounts offer 24x5 support, while Enterprise customers have priority access and options for custom coverage.
+
+    If you have project-related or modeling questions, review our Support page or dbt Community Slack to get help as well.
+  </div>
+</details>
diff --git a/website/docs/docs/get-started/docker-install.md b/website/docs/docs/get-started/docker-install.md
new file mode 100644
index 00000000000..0a34cf4ec01
--- /dev/null
+++ b/website/docs/docs/get-started/docker-install.md
@@ -0,0 +1,54 @@
+---
+title: "Install with Docker"
+description: "You can use Docker to install dbt and adapter plugins from the command line."
+---
+
+dbt Core and all adapter plugins maintained by dbt Labs are available as [Docker](https://docs.docker.com/) images, and distributed via [GitHub Packages](https://docs.github.com/en/packages/learn-github-packages/introduction-to-github-packages).
+
+Using a prebuilt Docker image to install dbt Core in production has a few benefits: it already includes dbt-core, one or more database adapters, and pinned versions of all their dependencies. By contrast, `pip install dbt-core dbt-<adapter>` takes longer to run, and will always install the latest compatible versions of every dependency.
+
+You might also be able to use Docker to install and develop locally if you don't have a Python environment set up. Note that running dbt in this manner can be significantly slower if your operating system differs from the system that built the Docker image. If you're a frequent local developer, we recommend that you install dbt Core via [Homebrew](/docs/get-started/homebrew-install) or [pip](/docs/get-started/pip-install) instead.
+
+### Prerequisites
+* You've installed Docker. For more information, see the [Docker](https://docs.docker.com/) site.
+* You understand which database adapter(s) you need. For more information, see [About dbt adapters](/docs/get-started/installation#about-dbt-adapters).
+* You understand how dbt Core is versioned. For more information, see [About dbt Core versions](core-versions).
+* You have a general understanding of dbt, the dbt workflow, and developing locally in the command line interface (CLI). For more information, see [About dbt](/docs/introduction#how-do-i-use-dbt).
+
+### Install a dbt Docker image from GitHub Packages
+
+Official dbt docker images are hosted as [packages in the `dbt-labs` GitHub organization](https://github.com/orgs/dbt-labs/packages?visibility=public). We maintain images and tags for every version of every database adapter, as well as two tags that update as new versions are released:
+- `latest`: Latest overall version of dbt-core + this adapter
+- `<Major>.<Minor>.latest`: Latest patch of dbt-core + this adapter for the `<Major>.<Minor>` version family. For example, `1.1.latest` includes the latest patches for dbt Core v1.1.
+
+Install an image using the `docker pull` command:
+```
+docker pull ghcr.io/dbt-labs/<db_adapter_name>:<version_tag>
+```
+
+### Running a dbt Docker image in a container
+
+The `ENTRYPOINT` for dbt Docker images is the command `dbt`. You can bind-mount your project to `/usr/app` and use dbt as normal:
+```
+docker run \
+--network=host \
+--mount type=bind,source=path/to/project,target=/usr/app \
+--mount type=bind,source=path/to/profiles.yml,target=/root/.dbt/ \
+<dbt_image_name> \
+ls
+```
+
+Notes:
+* Bind-mount sources _must_ be absolute paths.
+* You may need to make adjustments to the docker networking setting depending on the specifics of your data warehouse or database host.
+
+### Building your own dbt Docker image
+
+If the pre-made images don't fit your use case, we also provide a [`Dockerfile`](https://github.com/dbt-labs/dbt-core/blob/main/docker/Dockerfile) and [`README`](https://github.com/dbt-labs/dbt-core/blob/main/docker/README.md) that can be used to build custom images in a variety of ways.
+
+In particular, the Dockerfile supports building images:
+- Images that install all adapters maintained by dbt Labs
+- Images that install one or more third-party adapters
+- Images built against another system architecture
+
+Please note that, if you go the route of building your own Docker images, we are unable to offer dedicated support for custom use cases. If you run into problems, you are welcome to [ask the community for help](getting-help) or [open an issue](oss-expectations#issues) in the `dbt-core` repository. If many users are requesting the same enhancement, we will tag the issue `help_wanted` and invite community contribution.
diff --git a/website/docs/tutorial/learning-more/getting-started-dbt-core.md b/website/docs/docs/get-started/getting-started-dbt-core.md
similarity index 83%
rename from website/docs/tutorial/learning-more/getting-started-dbt-core.md
rename to website/docs/docs/get-started/getting-started-dbt-core.md
index db20620d06d..11c0666b44d 100644
--- a/website/docs/tutorial/learning-more/getting-started-dbt-core.md
+++ b/website/docs/docs/get-started/getting-started-dbt-core.md
@@ -2,16 +2,16 @@
 title: "Getting started with dbt Core"
 id: getting-started-dbt-core
 description: "Connecting your warehouse to dbt Core using the CLI."
-sidebar_label: "Getting started with dbt Core"
+sidebar_label: "Getting started"
 ---
 
-When you use dbt Core to work with dbt, you will be editing files locally using a code editor, and running projects using the dbt command line interface (dbt CLI). If you'd rather edit files and run projects using the web-based Integrated Development Environment (IDE), you should refer to [Getting set up in dbt Cloud](/tutorial/getting-set-up).
+When you use dbt Core to work with dbt, you will be editing files locally using a code editor, and running projects using the dbt command line interface (dbt CLI). If you'd rather edit files and run projects using the web-based Integrated Development Environment (IDE), you should refer to [Getting set up in dbt Cloud](/docs/get-started/getting-started/set-up-dbt-cloud).
 
 ## Prerequisites
 
 * To use the dbt CLI, it's important that you know some basics of the Terminal. In particular, you should understand `cd`, `ls` and `pwd` to navigate through the directory structure of your computer easily.
-* Install dbt Core using the [installation instructions](/dbt-cli/install/overview) for your operating system.
-* Complete [Setting up (in BigQuery)](/tutorial/getting-set-up/setting-up-bigquery#setting-up) and [Loading data (BigQuery)](/tutorial/getting-set-up/setting-up-bigquery#loading-data).
+* Install dbt Core using the [installation instructions](/docs/get-started/installation) for your operating system.
+* Complete [Setting up (in BigQuery)](/docs/get-started/getting-started/getting-set-up/setting-up-bigquery#setting-up) and [Loading data (BigQuery)](/docs/get-started/getting-started/getting-set-up/setting-up-bigquery#loading-data).
 * [Create a GitHub account](https://github.com/join) if you don't already have one.
 
 ## Create a starter project
 
@@ -25,7 +25,7 @@ The following steps use [GitHub](https://github.com/) as the Git provider for th
 
 1. [Create a new GitHub repository](https://github.com/new) named `dbt-tutorial`.
 2. Select **Public** so the repository can be shared with others. You can always make it private later.
 3. Leave the default values for all other settings.
-4. Click **Create repository**.
+4. Click **Create repository**.
 5. Save the commands from "…or create a new repository on the command line" to use later in [Commit your changes](#commit-your-changes).
 
 ### Create a project
 
@@ -87,7 +87,7 @@ To create your dbt project:
 
 ### Connect to BigQuery
 
-When developing locally, dbt connects to your data warehouse using a [profile](/dbt-cli/configure-your-profile), which is a yaml file with all the connection details to your warehouse.
+When developing locally, dbt connects to your data warehouse using a [profile](/docs/get-started/connection-profiles), which is a yaml file with all the connection details to your warehouse.
 
 1. Create a file in the `~/.dbt/` directory named `profiles.yml`.
 2. Move your BigQuery keyfile into this directory.
 
@@ -117,7 +117,7 @@ When developing locally, dbt connects to your data warehouse using a [profile](/
 
    ```terminal
   $ dbt debug
-  > Connection test: OK connection ok
+  > Connection test: OK connection ok
   ```
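+
+If the connection test fails instead, a useful first check is to confirm which profiles directory dbt is reading. This is a quick sketch; the exact output varies by dbt version:
+
+```shell
+# Print the directory where dbt looks for profiles.yml
+dbt debug --config-dir
+```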
@@ -126,11 +126,11 @@ When developing locally, dbt connects to your data warehouse using a [profile](/
 
 #### FAQs
 
-
-
-
-
-
+
+
+
+
+
 
 ### Perform your first dbt run
 
@@ -166,14 +166,14 @@ Commit your changes so that the repository contains the latest code.
 
 ## Build your first models
 
-Now that you set up your sample project, you can get to the fun part — [building models](building-models)! You will take a sample query and turn it into a model in your dbt project.
+Now that you set up your sample project, you can get to the fun part — [building models](/docs/build/sql-models)! You will take a sample query and turn it into a model in your dbt project.
 
 ### Check out a new git branch
 
 Check out a new git branch to work on new code:
 
 1. Create a new branch by using the `checkout` command and passing the `-b` flag:
 
   ```terminal
  $ git checkout -b add-customers-model
  > Switched to a new branch `add-customers-model`
  ```
 
@@ -196,11 +196,11 @@ When you return to the BigQuery console, you can `select` from this model.
 
 #### FAQs
 
-
-
-
-
-
+
+
+
+
+
 
 ### Change the way your model is materialized
 
@@ -239,8 +239,8 @@ You can also explore:
 
 #### FAQs
 
-
-
+
+
 
 #### Next steps
 
@@ -258,4 +258,4 @@ You need to commit the changes you made to the project so that the repository ha
 
 ## Schedule a job
 
-We recommend using dbt Cloud to schedule a job. For more information about using dbt Core to schedule a job, see [dbt airflow](/blog/dbt-airflow-spiritual-alignment) blog post or [Running a dbt project](/docs/running-a-dbt-project/running-dbt-in-production).
+We recommend using dbt Cloud to schedule a job. For more information about using dbt Core to schedule a job, see the [dbt airflow](/blog/dbt-airflow-spiritual-alignment) blog post or [deployments](/docs/deploy/deployments).
diff --git a/website/docs/tutorial/add-a-seed.md b/website/docs/docs/get-started/getting-started/add-a-seed.md
similarity index 86%
rename from website/docs/tutorial/add-a-seed.md
rename to website/docs/docs/get-started/getting-started/add-a-seed.md
index 7561508d2a8..85080109cd4 100644
--- a/website/docs/tutorial/add-a-seed.md
+++ b/website/docs/docs/get-started/getting-started/add-a-seed.md
@@ -4,7 +4,7 @@
 id: add-a-seed
 description: Learn how to add a seed file to your project
 ---
 :::caution Heads up!
-You'll need to have completed the Getting Started part of this tutorial to
+You'll need to have completed the earlier parts of this guide to
 complete this lesson
 :::
diff --git a/website/docs/tutorial/building-your-first-project.md b/website/docs/docs/get-started/getting-started/building-your-first-project.md
similarity index 100%
rename from website/docs/tutorial/building-your-first-project.md
rename to website/docs/docs/get-started/getting-started/building-your-first-project.md
diff --git a/website/docs/docs/get-started/getting-started/building-your-first-project/build-your-first-models.md b/website/docs/docs/get-started/getting-started/building-your-first-project/build-your-first-models.md
new file mode 100644
index 00000000000..48d859fdeaf
--- /dev/null
+++ b/website/docs/docs/get-started/getting-started/building-your-first-project/build-your-first-models.md
@@ -0,0 +1,55 @@
+---
+title: Build your first models
+id: build-your-first-models
+description: "Now that you've set up the starter project, you can get to the fun part — building models!"
+---
+
+Now that you set up your sample project and had a successful run, you can get to the fun part — [building models](/docs/build/sql-models)! You will take a sample query and turn it into a model in your dbt project. A model in dbt is a select statement.
+
+## Check out a new git branch
+
+1. Click **Develop** from the upper left of dbt Cloud. You need to create a new branch since the main branch is now set to read-only mode.
+
+2. Click **Create branch**, and name your branch `add-customers-model`.
+
+## Build your first model
+
+1. Click **Develop** from the upper left of dbt Cloud.
+2. Click the **...** next to the Models directory, then select **Create file**.
+3. Name the file `models/customers.sql`, then click **Create**.
+4. Copy the following query into the file and click **Save File**.
+
+5. Enter `dbt run` in the command prompt at the bottom of the screen. You should get a successful run and see three models under DETAILS.
+
+In the future, you would connect your business intelligence (BI) tools to these views and tables so they read cleaned-up data rather than raw data.
+
+#### FAQs
+
+## Change the way your model is materialized
+
+## Delete the example models
+
+## Build models on top of other models
+
+## Next steps
+
+Once you build your first model, you're ready to [test and document your project](/docs/get-started/getting-started/building-your-first-project/test-and-document-your-project).
+
diff --git a/website/docs/tutorial/building-your-first-project/schedule-a-job.md b/website/docs/docs/get-started/getting-started/building-your-first-project/schedule-a-job.md
similarity index 62%
rename from website/docs/tutorial/building-your-first-project/schedule-a-job.md
rename to website/docs/docs/get-started/getting-started/building-your-first-project/schedule-a-job.md
index 322bde37446..a6b363738b0 100644
--- a/website/docs/tutorial/building-your-first-project/schedule-a-job.md
+++ b/website/docs/docs/get-started/getting-started/building-your-first-project/schedule-a-job.md
@@ -1,7 +1,7 @@
 ---
 title: Schedule a job
 id: schedule-a-job
-description: In this part of the tutorial, we'll go through how you can schedule a job in dbt Cloud.
+description: In this part of the guide, we'll go through how you can schedule a job in dbt Cloud.
 ---
 
 In this part of the guide, you will learn how to schedule a job to be run in your production environment. Scheduling a job is sometimes called _deploying a project_.
 
@@ -17,24 +17,26 @@ Now that you've built your customer model, you need to commit the changes you ma
 
 ## Create a deployment environment
 
-1. Click ![hamburger icon](/img/hamburger-icon.png), then click **Home**.
-2. Under "Deployment Environments" select **Create a new one**.
+1. In the upper left, select **Deploy**, then click **Environments**.
+2. Click **Create Environment**.
 3. Name your deployment environment. For example, "Production."
-4. Add a target dataset (for example, "analytics"). dbt will build into this dataset. For some warehouses this will be named "schema."
+4. Add a target dataset, for example, "Analytics." dbt will build into this dataset. For some warehouses this will be named "schema."
+5. Click **Save**.
 
 ## Create and run a job
 
 Jobs are a set of dbt commands that you want to run on a schedule. For example, `dbt run` and `dbt test`.
 
-1. After creating your deployment environment, you should be directed to the page for new environment. If not, click ![hamburger icon](/img/hamburger-icon.png), then click **Jobs**.
-2. Click **New Job** and provide a name, for example "Production run", and link to the Environment you just created.
-3. Click **Generate docs**.
-4. Ensure you include these commands as part of your job:
+1. After creating your deployment environment, you should be directed to the page for your new environment. If not, select **Deploy** in the upper left, then click **Jobs**.
+2. Click **Create one** and provide a name, for example "Production run", and link to the Environment you just created.
+3. Scroll down to "Execution Settings" and select **Generate docs on run**.
+4. Under "Commands," add these commands as part of your job if you don't see them:
   * `dbt run`
   * `dbt test`
 5. For this exercise, **do NOT** set a schedule for your project to run -- while your organization's project **should** run regularly, there's no need to run this project on a schedule.
 6. Select **Save**, then click **Run now** to run your job.
-7. Click the run to see the progress — once the run is complete, click **View Documentation** to see the docs for your project.
+7. Click the run and watch its progress under "Run history."
+8. Once the run is complete, click **View Documentation** to see the docs for your project.
 
 :::tip
 Congratulations 🎉! You've just deployed your first dbt project!
@@ -42,22 +44,22 @@
 
 ### FAQs
 
-
+
 
 ## Next steps
 
-Congratulations! Now that you've got a working dbt project, you can read about dbt [best practices](/docs/guides/best-practices).
+Congratulations! Now that you've got a working dbt project, you can read about dbt [best practices](/guides/best-practices).
 
 You can improve your dbt skills with these fun exercises:
 
-* Turn your raw data references (for example, turn `` `dbt-tutorial`.jaffle_shop.orders``) into [sources](/docs/building-a-dbt-project/using-sources).
+* Turn your raw data references (for example, turn `` `dbt-tutorial`.jaffle_shop.orders``) into [sources](/docs/build/sources).
 * Build a new model for `orders` that uses the `payments` table to calculate the total order amount.
 * Reorganize your project into [how we structure dbt projects](/blog/how-we-structure-our-dbt-projects).
 * If you want a more in-depth learning experience, we recommend taking the [dbt Fundamentals on our dbt Learn online courses site](https://courses.getdbt.com/courses/fundamentals).
 
-Here are some ways to [learn more](learning-more) essential dbt skills:
+Here are some ways to learn more essential dbt skills:
 
-* Learn how to use Jinja in your project by reading the [Jinja tutorial](using-jinja).
-* Learn how to [connect to dbt Core using the CLI](getting-started-dbt-core).
-* Refactor [legacy SQL to dbt SQL](refactoring-legacy-sql).
+* Learn how to use Jinja in your project by reading the [Jinja tutorial](/docs/get-started/learning-more/using-jinja).
+* Learn how to [connect to dbt Core using the CLI](/docs/get-started/getting-started-dbt-core).
+* Refactor [legacy SQL to dbt SQL](/docs/get-started/learning-more/refactoring-legacy-sql).
diff --git a/website/docs/tutorial/building-your-first-project/test-and-document-your-project.md b/website/docs/docs/get-started/getting-started/building-your-first-project/test-and-document-your-project.md similarity index 76% rename from website/docs/tutorial/building-your-first-project/test-and-document-your-project.md rename to website/docs/docs/get-started/getting-started/building-your-first-project/test-and-document-your-project.md index dfde39e9692..db7fa7f3ec0 100644 --- a/website/docs/tutorial/building-your-first-project/test-and-document-your-project.md +++ b/website/docs/docs/get-started/getting-started/building-your-first-project/test-and-document-your-project.md @@ -16,11 +16,11 @@ description: Let's test and document our models to build trust in our data. #### FAQs - - + + ## Next steps -Once you test and document your project, you're ready to [Schedule a job](tutorial/building-your-first-project/schedule-a-job). +Once you test and document your project, you're ready to [Schedule a job](/docs/get-started/getting-started/building-your-first-project/schedule-a-job). diff --git a/website/docs/tutorial/create-a-project.md b/website/docs/docs/get-started/getting-started/create-a-project.md similarity index 90% rename from website/docs/tutorial/create-a-project.md rename to website/docs/docs/get-started/getting-started/create-a-project.md index cd13f5fbe57..9ecd487d527 100644 --- a/website/docs/tutorial/create-a-project.md +++ b/website/docs/docs/get-started/getting-started/create-a-project.md @@ -13,6 +13,6 @@ You can work with dbt in two ways: * **dbt Cloud**: Edit files and run projects using the web-based Integrated Development Environment (IDE). * **dbt CLI**: Edit files locally using a code editor, and run projects using the command line interface (CLI). -To use the CLI, it's important that you know some basics of your terminal. In particular, you should understand `cd`, `ls` and `pwd` to navigate through the directory structure of your computer easily. As such, if you are new to programming, we recommend using **dbt Cloud** for this tutorial. +To use the CLI, it's important that you know some basics of your terminal. In particular, you should understand `cd`, `ls` and `pwd` to navigate through the directory structure of your computer easily. As such, if you are new to programming, we recommend using **dbt Cloud** for this guide. -If you wish to use the CLI, please follow the [installation instructions](/dbt-cli/install/overview) for your operating system. +If you wish to use the CLI, please follow the [installation instructions](/docs/get-started/installation) for your operating system. diff --git a/website/docs/docs/get-started/getting-started/getting-set-up.md b/website/docs/docs/get-started/getting-started/getting-set-up.md new file mode 100644 index 00000000000..3ef1731bc38 --- /dev/null +++ b/website/docs/docs/get-started/getting-started/getting-set-up.md @@ -0,0 +1,20 @@ +--- +title: About setting up dbt Cloud +id: set-up-dbt-cloud +description: "Set up a sample project to get you started using dbt Cloud." +sidebar_label: "About set up" +--- + +Learn how to set up dbt Cloud using a sample project and one of the most common data platforms. 
You can select from [BigQuery](/docs/get-started/getting-started/getting-set-up/setting-up-bigquery), [Databricks](/docs/get-started/getting-started/getting-set-up/setting-up-databricks), [Redshift](/docs/get-started/getting-started/getting-set-up/setting-up-redshift), or [Snowflake](/docs/get-started/getting-started/getting-set-up/setting-up-snowflake). If you're not sure, then try [BigQuery](/docs/get-started/getting-started/getting-set-up/setting-up-bigquery).
+
+This guide will show you how to set up dbt and perform some key tasks. These tasks will illustrate how dbt establishes standard practices for your work:
+
+* Set up a warehouse with sample data
+* Connect the warehouse to dbt
+* Add a Git repository to dbt
+* Execute a dbt transformation using `dbt run`
+* Schedule a job or transformation
+
+If you want a more in-depth learning experience, we recommend taking the dbt Fundamentals course on our [dbt Learn online courses site](https://courses.getdbt.com/).
+
+If you'd rather edit and run files locally using the dbt command line interface (dbt CLI) to connect to dbt Core, then you can refer to [Getting started using dbt Core](/docs/get-started/getting-started-dbt-core).
diff --git a/website/docs/tutorial/getting-set-up/setting-up-bigquery.md b/website/docs/docs/get-started/getting-started/getting-set-up/setting-up-bigquery.md
similarity index 89%
rename from website/docs/tutorial/getting-set-up/setting-up-bigquery.md
rename to website/docs/docs/get-started/getting-started/getting-set-up/setting-up-bigquery.md
index 2103846e1b2..86ef5bfc6ef 100644
--- a/website/docs/tutorial/getting-set-up/setting-up-bigquery.md
+++ b/website/docs/docs/get-started/getting-started/getting-set-up/setting-up-bigquery.md
@@ -3,6 +3,7 @@
 title: "Set up and connect BigQuery"
 id: setting-up-bigquery
 description: "Set up BigQuery with sample data and connect to dbt Cloud."
 sidebar_label: "Set up and connect BigQuery"
+pagination_next: docs/get-started/getting-started/building-your-first-project/build-your-first-models
 ---
 
 ## Introduction
 
@@ -12,12 +13,12 @@ For the BigQuery project in the getting started guide, you'll learn how to set u
 
 This guide will walk you through:
 
 - Setting up a new BigQuery instance
-- Accessing sample date in a public data set
+- Accessing sample data in a public data set
 - Connecting dbt Cloud to BigQuery
 
 ## Prerequisites
 
-Before beginning this tutorial, make sure that you have access to **new or existing Google account**. You can use a personal or work account to set up BigQuery through Google Cloud Platform (GCP).
+Before beginning this guide, make sure that you have access to a [new or existing Google account](https://support.google.com/accounts/answer/27441?hl=en). You can use a personal or work account to set up BigQuery through [Google Cloud Platform (GCP)](https://cloud.google.com/free).
 
 ## Setting up
 
@@ -88,7 +89,7 @@ You will learn how to connect dbt Cloud to Google BigQuery so that you can lever
 
-In order to let dbt connect to your warehouse, you'll need to generate a keyfile. This is analogous to using a database user name and password with most other data warehouses.
+In order to let dbt connect to your warehouse, you'll need to generate a keyfile. This is analogous to using a database user name and password with most other data warehouses.
 
 1. Go to the [BigQuery credential wizard](https://console.cloud.google.com/apis/credentials/wizard). Make sure your new project is selected in the header. If you do not see your account or project, click your profile picture to the right and verify you are using the correct email account.
 2. Select **+ Create Credentials** then select **Service account**.
@@ -100,7 +101,7 @@ In order to let dbt connect to your warehouse, you'll need to generate a keyfile
 8. Select **Keys**.
 9. Click **Add Key** then select **Create new key**.
 10. Select **JSON** as the key type then click **Create**.
-11. You should be prompted to download the JSON file. Save it locally to an easy-to-remember spot, with a clear filename. For example, `dbt-user-creds.json`.
+11. You should be prompted to download the file. Save it locally to an easy-to-remember spot, with a clear filename. For example, `dbt-user-creds.json`.
 
 ### Create a dbt Cloud account
 
@@ -132,7 +133,7 @@ Now let's set up the connection between dbt Cloud and BigQuery.
 
 Congratulations! You have successfully completed the following:
 
 - Set up a new BigQuery instance
-- Accessing sample date in a public data set
+- Accessed sample data in a public data set
 - Connected dbt Cloud to BigQuery
 
 ## Next steps
diff --git a/website/docs/tutorial/getting-set-up/setting-up-databricks.md b/website/docs/docs/get-started/getting-started/getting-set-up/setting-up-databricks.md
similarity index 83%
rename from website/docs/tutorial/getting-set-up/setting-up-databricks.md
rename to website/docs/docs/get-started/getting-started/getting-set-up/setting-up-databricks.md
index 84873ec0569..3d43d312565 100644
--- a/website/docs/tutorial/getting-set-up/setting-up-databricks.md
+++ b/website/docs/docs/get-started/getting-started/getting-set-up/setting-up-databricks.md
@@ -3,6 +3,8 @@
 title: "Set up and connect Databricks"
 id: setting-up-databricks
 description: "Set up Databricks with sample data and connect to dbt Cloud."
 sidebar_label: "Set up and connect Databricks"
+pagination_prev: docs/get-started/getting-started/set-up-dbt-cloud
+pagination_next: docs/get-started/getting-started/building-your-first-project/build-your-first-models
 ---
 
 ## Introduction
 
@@ -148,7 +150,7 @@ Our next step is to load some data to transform. Luckily for us, Databricks make
 
   ```sql
  select * from default.jaffle_shop_customers
  select * from default.jaffle_shop_orders
- select * from default.stripe.payments
+ select * from default.stripe_payments
  ```
@@ -198,34 +200,43 @@ Congratulations! At this point, you have created a Databricks account, loaded tr
 
 ### Option 2: Connect dbt Cloud and Databricks manually
 
-1. To manually setup dbt Cloud, you will need the SQL Endpoint connection information and to generate a user token. You can find your SQL endpoint connection information by going to the `Databricks UI > SQL > SQL Endpoints > Starter Endpoint > Connection details`. Save this information because you will need it later.
+#### Get endpoint and token information
 
+To manually set up dbt Cloud, you will need your SQL endpoint connection information, and you will need to generate a user token. You can find your SQL endpoint connection information by going to the `Databricks UI > SQL > SQL Endpoints > Starter Endpoint > Connection details`. Save this information because you will need it later.
 
-2. To generate a user token for your development credentials in dbt Cloud, click on `Settings` on the left side console (while still in the SQL part of the workspace). Click on `Personal Access Token` and provide a comment like `dbt Cloud development`. Save the token information somewhere because you will need it for the next part.
-
- -
+ + +To generate a user token for your development credentials in dbt Cloud, click on `Settings` on the left side console (while still in the SQL part of the workspace). Click on `Personal Access Token` and provide a comment like `dbt Cloud development`. Save the token information somewhere because you will need it for the next part. +
+ +
+ +#### Create a dbt Cloud account -3. Choose **Databricks** to setup your connection. -
- -
-4. For the name, write `Databricks` or another simple title.
-5. For Databricks settings, reference your SQL endpoint connection details from step 6 of the previous section for each of the following fields:
+#### Connect dbt Cloud to Databricks
+
+1. Choose **Databricks** to set up your connection.
+
+2. For the name, write `Databricks` or another simple title.
+3. For Databricks settings, reference your SQL endpoint connection details from step 6 of the previous section for each of the following fields:
     - Method will be ODBC
     - Hostname comes from Server hostname
     - Endpoint comes from the last part of HTTP path after `/endpoints`
-
+ +
+
+4. For your Development Credentials, type:
+
+    - `User` and `token` that you saved in a previous step.
+    - You’ll notice that the schema name has been auto created for you. By convention, this is `dbt_<first-initial, last-name>`. This is the schema connected directly to your development environment, and it's where your models will be built when running dbt within the Cloud IDE.
+
-    - `User` and `token` that you saved in a previous step.
-    - For the schema field, choose a development schema (this will be your default development database to build objects into). We recommend something in the form of dbt_{{ first initial, last name}} like `dbt_achen`.
-
-7. Then scroll to the top of the page to test your connection. Once successful, click `Continue`.
+5. Click **Test Connection** at the bottom. This verifies that dbt Cloud can access your Databricks workspace.
+6. If the connection test succeeds, click **Next**. If it fails, you may need to check your Databricks settings and credentials.
 
 ## Initialize your repository and start development
diff --git a/website/docs/tutorial/getting-set-up/setting-up-redshift.md b/website/docs/docs/get-started/getting-started/getting-set-up/setting-up-redshift.md
similarity index 82%
rename from website/docs/tutorial/getting-set-up/setting-up-redshift.md
rename to website/docs/docs/get-started/getting-started/getting-set-up/setting-up-redshift.md
index 7304910b98a..85300aca8ce 100644
--- a/website/docs/tutorial/getting-set-up/setting-up-redshift.md
+++ b/website/docs/docs/get-started/getting-started/getting-set-up/setting-up-redshift.md
@@ -3,6 +3,8 @@
 title: "Set up and connect Redshift"
 id: setting-up-redshift
 description: "Set up Redshift with sample data and connect to dbt Cloud."
 sidebar_label: "Set up and connect Redshift"
+pagination_prev: docs/get-started/getting-started/set-up-dbt-cloud
+pagination_next: docs/get-started/getting-started/building-your-first-project/build-your-first-models
 ---
 
 ## Introduction
 
@@ -32,7 +34,7 @@ Let’s get started by accessing your AWS account and setting up Redshift.
 
-4. When the stack status changes to “CREATE_COMPLETE”, click on the `Outputs` tab on the top to view information that you will use throughout the rest of this tutorial. Save those credentials for later by keeping this open in a tab.
+4. When the stack status changes to “CREATE_COMPLETE”, click on the `Outputs` tab on the top to view information that you will use throughout the rest of this guide. Save those credentials for later by keeping this open in a tab.
 
 5. Type `Redshift` into the search bar at the top and click on `Amazon Redshift`
 
@@ -64,9 +66,9 @@ Congrats! You have your Redshift cluster.
 
 Now we are going to load our sample data into the S3 bucket that our Cloudformation template created. S3 buckets are a cheap and simple way to store data outside of Redshift.
 
 1. The data used in this course is stored as CSVs in a public S3 bucket. You can use the following URLs to download these files. Download these to your computer to use in the following steps.
-- [jaffle_shop_customers.csv](https://www.google.com/url?q=http://dbt-tutorial-public.s3-us-west-2.amazonaws.com/jaffle_shop_customers.csv&sa=D&source=editors&ust=1644864530119236&usg=AOvVaw3IVEW44ZbyLKJ5x0GZc_y_) -- [jaffle_shop_orders.csv](https://www.google.com/url?q=http://dbt-tutorial-public.s3-us-west-2.amazonaws.com/jaffle_shop_orders.csv&sa=D&source=editors&ust=1644864530119746&usg=AOvVaw0CjkjBeGxTipTjfbxvmN-_) -- [stripe_payments.csv](https://www.google.com/url?q=http://dbt-tutorial-public.s3-us-west-2.amazonaws.com/stripe_payments.csv&sa=D&source=editors&ust=1644864530120240&usg=AOvVaw1nwPSDg9fp-pnzepudMSLm) +- [jaffle_shop_customers.csv](https://dbt-tutorial-public.s3-us-west-2.amazonaws.com/jaffle_shop_customers.csv) +- [jaffle_shop_orders.csv](https://dbt-tutorial-public.s3-us-west-2.amazonaws.com/jaffle_shop_orders.csv) +- [stripe_payments.csv](https://dbt-tutorial-public.s3-us-west-2.amazonaws.com/stripe_payments.csv) 2. Now we are going to use the S3 bucket that you created via CloudFormation and upload the files. Go to the search bar at the top and type in `S3` and click on S3. There will be sample data in the file already, feel free to ignore it or use it for other modeling exploration. @@ -158,30 +160,39 @@ Congratulations! At this point, you have created a Redshift instance and loaded ## Connecting to dbt Cloud +#### Create a dbt Cloud account + -### Connect dbt Cloud to Redshift +#### Connect dbt Cloud to Redshift Now let's set up the connection between dbt Cloud and Redshift -For Set up a Database Connection, choose Redshift. - 1. Click **Redshift** to set up your connection. +
+ +
- +2. For the name, write `Redshift` or another simple title. -2. For the name, simply choose Redshift or another simple title 3. Enter your Redshift settings. Reference your credentials you saved from the CloudFormation template. - Your hostname is the entire hostname. Make sure to drop the http:// at the beginning and any trailing slashes at the end. - The port is `5439` - The database is `dbtworkshop`. +
+ +
+
 4. Set your development credentials. These credentials will be used by dbt Cloud to connect to Redshift. Those credentials (as provided in your cloudformation output) will be:
     - Username: `dbtadmin`
     - password: *this was the password that you set earlier in the guide*
-    - schema: This is your sandbox schema where you will build all of your development objects into. We generally use the `dbt_` naming convention.
+    - Schema: You’ll notice that the schema name has been auto created for you. By convention, this is `dbt_<first-initial, last-name>`. This is the schema connected directly to your development environment, and it's where your models will be built when running dbt within the Cloud IDE.
+
+ +
-4. Click **Test** at the top. This verifies that dbt Cloud can access your Redshift cluster.
-5. If you see "Connection test Succeeded!" then click **Continue**. If it fails, you might need to go back and check your Redshift settings and development credentials.
+5. Click **Test Connection** at the bottom. This verifies that dbt Cloud can access your Redshift cluster.
+6. If the connection test succeeds, click **Next**. If it fails, you may need to check your Redshift settings and credentials.
 
 ## Initialize your repository and start development
diff --git a/website/docs/tutorial/getting-set-up/setting-up-snowflake.md b/website/docs/docs/get-started/getting-started/getting-set-up/setting-up-snowflake.md
similarity index 84%
rename from website/docs/tutorial/getting-set-up/setting-up-snowflake.md
rename to website/docs/docs/get-started/getting-started/getting-set-up/setting-up-snowflake.md
index 150e6c2b0f0..86ad73d3d47 100644
--- a/website/docs/tutorial/getting-set-up/setting-up-snowflake.md
+++ b/website/docs/docs/get-started/getting-started/getting-set-up/setting-up-snowflake.md
@@ -3,6 +3,8 @@
 title: "Set up and connect Snowflake"
 id: setting-up-snowflake
 description: "Set up Snowflake with sample data and connect to dbt Cloud."
 sidebar_label: "Set up and connect Snowflake"
+pagination_prev: docs/get-started/getting-started/set-up-dbt-cloud
+pagination_next: docs/get-started/getting-started/building-your-first-project/build-your-first-models
 ---
 
 ## Introduction
 
@@ -19,7 +21,7 @@
 
 ## Prerequisites
 
-The only prerequisites for this tutorial are to have access to an email account for signing up for Snowflake and dbt Cloud.
+The only prerequisites for this guide are to have access to an email account for signing up for Snowflake and dbt Cloud.
 
 ## Setting up
 
@@ -173,7 +175,7 @@
 
 ### Option 1: Connect dbt Cloud and Snowflake with partner connect
 
-1. With your Snowflake account up and running with data, we’re ready to connect it with dbt Cloud. We’re going to use [Snowflake Partner Connect](https://docs.snowflake.com/en/user-guide/ecosystem-partner-connect.html) to set up your dbt Cloud account and project. Using Partner Connect will allow you to create a complete dbt account with your [Snowflake connection](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database#connecting-to-snowflake), [a managed repository](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository), [environments](https://docs.getdbt.com/docs/guides/managing-environments), and credentials.
+1. With your Snowflake account up and running with data, we’re ready to connect it with dbt Cloud. We’re going to use [Snowflake Partner Connect](https://docs.snowflake.com/en/user-guide/ecosystem-partner-connect.html) to set up your dbt Cloud account and project. Using Partner Connect will allow you to create a complete dbt account with your [Snowflake connection](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database#connecting-to-snowflake), [a managed repository](/docs/collaborate/git/managed-repository), [environments](/docs/build/custom-schemas#managing-environments), and credentials.
 2. There are a couple of ways you can access the Partner Connect page, depending on whether you’re navigating in the classic Snowflake UI or the new UI.
* **Snowflake Classic UI:** If you’re using the classic version of the Snowflake UI, you can click the Partner Connect button in the top bar of your account. From there, click on the dbt tile to open up the connect box. @@ -218,7 +220,7 @@ There are two ways to connect dbt Cloud and Snowflake. The first option is Partn

-6. We have one slight tweak to make to dbt Cloud interface to account for the `analytics` database and `transforming` warehouse created earlier. Click the hamburger menu in the top left and choose account settings. Select the project titled, "Partner Connection Trial" and select `snowflake` in the overview table. Select edit and update the fields `database` and `warehouse` to be `analytics` and `transforming` respectively. +6. We have one slight tweak to make to the dbt Cloud interface to account for the `analytics` database and `transforming` warehouse created earlier. Click the gear icon in the upper right and select **Account Settings**. Choose the "Partner Connection Trial" project and select `snowflake` in the overview table. Select edit and update the fields `database` and `warehouse` to be `analytics` and `transforming`, respectively.

@@ -240,13 +242,17 @@ There are two ways to connect dbt Cloud and Snowflake. The first option is Partn
 
 Now let's formally set up the connection between dbt Cloud and Snowflake.
 
-1. Click on the "Snowflake" icon to set up your connection.
+1. Choose **Snowflake** to set up your connection.
+

+ +
+2. For the name, write `Snowflake` or another simple title.
 3. Enter the following information under Snowflake settings.
-    * **Account:** Your account is going to be the url of your Snowflake trial account up to the period before snowflakecomputing.com. So, if this is the full url of my trial account: `oq65696.west-us-2.azure.snowflakecomputing.com` then the account will be: `oq65696.west-us-2.azure`. You can read more about Snowflake account identifiers [here](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html).
+    * **Account:** Find your account by using the Snowflake trial account URL and removing `snowflakecomputing.com`. The order of your account information will vary by Snowflake version. For example, Snowflake's Classic console URL might look like: `oq65696.west-us-2.azure.snowflakecomputing.com`. The AppUI or Snowsight URL might look more like: `snowflakecomputing.com/west-us-2.azure/oq65696`. In both examples, your account will be: `oq65696.west-us-2.azure`. For more information, see "[Account Identifiers](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html)" in the Snowflake documentation.
+
* **Role:** Leave blank for now. You can update this to a default Snowflake role in the future. * **Database:** `analytics`. This tells dbt to create new models in the analytics database. * **Warehouse:** `transforming`. This tells dbt to use the transforming warehouse we created earlier. -
@@ -254,16 +260,15 @@ Now let's formally set up the connection between dbt Cloud and Snowflake.
 
 4. Enter the following information under Development credentials.
     * **Username:** The username you created for Snowflake. Note: The username is not your email address and is usually your first and last name together in one word.
     * **Password:** The password you set when creating your Snowflake account
-    * **Schema:** You’ll notice that the schema name has been auto created for you and is `dbt_` followed by your first initial and last name. This is the schema connected directly to your development environment and is where your models will be built when running dbt within the Cloud IDE.
+    * **Schema:** You’ll notice that the schema name has been auto created for you. By convention, this is `dbt_<first-initial, last-name>`. This is the schema connected directly to your development environment, and it's where your models will be built when running dbt within the Cloud IDE.
     * **Target name:** leave as default
-    * **Threads:** Leave as 4. This is the number of simultaneous connects that dbt Cloud will make to build models concurently.
-
+    * **Threads:** Leave as 4. This is the number of simultaneous connections that dbt Cloud will make to build models concurrently.
-4. Click **Test** at the top. This will check that dbt Cloud can access your Snowflake account.
-5. If test successful, click **Continue**.
+5. Click **Test Connection** at the bottom. This verifies that dbt Cloud can access your Snowflake account.
+6. If the connection test succeeds, click **Next**. If it fails, you may need to check your Snowflake settings and credentials.
 
 ## Initialize your repository and start development
 
@@ -279,10 +284,9 @@ If you used Partner Connect, you can skip over to [initializing your dbt project
 
 Congratulations! You have successfully completed the following:
 
-- Set up a Databricks account
-- Loaded training data into your Databricks account
-- Configured a SQL endpoint in Databricks
-- Connected dbt Cloud and Databricks
+- Set up a new Snowflake instance
+- Loaded training data into your Snowflake account
+- Connected dbt Cloud and Snowflake
 
 ## Next steps
diff --git a/website/docs/docs/get-started/getting-started/overview.md b/website/docs/docs/get-started/getting-started/overview.md
new file mode 100644
index 00000000000..635b80a0155
--- /dev/null
+++ b/website/docs/docs/get-started/getting-started/overview.md
@@ -0,0 +1,34 @@
+---
+title: About getting started
+id: overview
+sidebar_label: "About getting started"
+description: "Create your first dbt project using a SQL query."
+---
+
+Before you begin, you will need:
+
+* A warehouse with sample data. If you don't have this, you can use the [BigQuery project](/docs/get-started/getting-started/getting-set-up/setting-up-bigquery), which leverages public data sets.
+* A basic understanding of Git.
+* A basic understanding of SQL.
+
+ + + + + +
+ +
diff --git a/website/docs/dbt-cli/install/homebrew.md b/website/docs/docs/get-started/homebrew-install.md similarity index 85% rename from website/docs/dbt-cli/install/homebrew.md rename to website/docs/docs/get-started/homebrew-install.md index aeeecb8c174..7617ff6d03f 100644 --- a/website/docs/dbt-cli/install/homebrew.md +++ b/website/docs/docs/get-started/homebrew-install.md @@ -1,5 +1,5 @@ --- -title: "Use Homebrew to install dbt" +title: "Install with Homebrew" description: "You can use Homebrew to install dbt Core and adapter plugins from the command line." --- @@ -16,7 +16,7 @@ If that sounds like you, great! Homebrew makes it significantly easier to instal - Installation with Homebrew can take longer than installing with other methods, because `brew` takes care of more setup behind the scenes - If you're using an M1 Mac, we recommend that you install dbt via Homebrew with [Rosetta](https://support.apple.com/en-us/HT211861). This is necessary for certain dependencies that are only supported on Intel processors. -If you're someone who prefers to manage Python environments yourself, such as having multiple versions of Python to switch between, we recommend you install dbt Core via [`pip` instead](install/pip). +If you're someone who prefers to manage Python environments yourself, such as having multiple versions of Python to switch between, we recommend you install dbt Core via [`pip` instead](/docs/get-started/pip-install). ### Installing with Homebrew @@ -28,7 +28,7 @@ brew install git brew tap dbt-labs/dbt ``` -Now you're ready to install dbt. Once you know [which adapter](available-adapters) you're using, you can install it as `dbt-`. For instance, if using Postgres: +Now you're ready to install dbt. Once you know [which adapter](supported-data-platforms) you're using, you can install it as `dbt-`. For instance, if using Postgres: ```shell brew install dbt-postgres @@ -36,7 +36,7 @@ brew install dbt-postgres Everywhere below that you see ``, replace it with the adapter name you're using. -**Note**: If you're using an adapter that isn't available as a Homebrew formula, we recommend you use [pip](install/pip) instead. +**Note**: If you're using an adapter that isn't available as a Homebrew formula, we recommend you use [pip](/docs/get-started/pip-install) instead. ### Upgrading dbt and your adapter diff --git a/website/docs/docs/get-started/installation-overview.md b/website/docs/docs/get-started/installation-overview.md new file mode 100644 index 00000000000..cfb03a6b2cc --- /dev/null +++ b/website/docs/docs/get-started/installation-overview.md @@ -0,0 +1,17 @@ +--- +title: "Installation overview" +id: "installation" +description: "You can install dbt Core using a few different tested methods." +--- + +You can install dbt Core on the command line by using one of these recommended methods: + +- [Use Homebrew to install dbt](/docs/get-started/homebrew-install) (recommended for MacOS + most popular plugins) +- [Use pip to install dbt](/docs/get-started/pip-install) +- [Use a Docker image to install dbt](/docs/get-started/docker-install) +- [Install dbt from source](/docs/get-started/source-install) + + +## About dbt adapters + +dbt works with a number of different data platforms (databases, query engines, and other SQL-speaking technologies). It does this by using a dedicated _adapter_ for each. When you install dbt, you'll also want to install the specific adapter for your database. For more details, see [Supported Data Platforms](supported-data-platforms). 
diff --git a/website/docs/tutorial/learning-more/refactoring-legacy-sql.md b/website/docs/docs/get-started/learning-more/refactoring-legacy-sql.md similarity index 85% rename from website/docs/tutorial/learning-more/refactoring-legacy-sql.md rename to website/docs/docs/get-started/learning-more/refactoring-legacy-sql.md index c347cb86a44..210eb10a58b 100644 --- a/website/docs/tutorial/learning-more/refactoring-legacy-sql.md +++ b/website/docs/docs/get-started/learning-more/refactoring-legacy-sql.md @@ -1,14 +1,14 @@ --- -title: Refactoring legacy SQL to dbt SQL +title: Refactoring legacy SQL to dbt id: refactoring-legacy-sql -description: This tutorial walks through refactoring a long SQL query (perhaps from a stored procedure) into modular dbt data models. +description: This guide walks through refactoring a long SQL query (perhaps from a stored procedure) into modular dbt data models. --- -You may have already learned how to build dbt models from scratch. +You may have already learned how to build dbt models from scratch. -But in reality, you probably already have some queries or stored procedures that power analyses and dashboards, and now you’re wondering how to port those into dbt. +But in reality, you probably already have some queries or stored procedures that power analyses and dashboards, and now you’re wondering how to port those into dbt. -There are two parts to accomplish this: migration and refactoring. In this tutorial we’re going to learn a process to help us turn legacy SQL code into modular dbt models. +There are two parts to accomplish this: migration and refactoring. In this guide we’re going to learn a process to help us turn legacy SQL code into modular dbt models. When migrating and refactoring code, it’s of course important to stay organized. We'll do this by following several steps (jump directly from the right sidebar): @@ -21,8 +21,9 @@ When migrating and refactoring code, it’s of course important to stay organize Let's get into it! -> Note: this tutorial is excerpted from the new dbt Learn On-demand Course, "Refactoring from Procedural SQL to dbt" - if you're curious, pick up the [free refactoring course here](https://courses.getdbt.com/courses/refactoring-sql-for-modularity), which includes example and practice refactoring projects. - +:::info More resources +This guide is excerpted from the new dbt Learn On-demand Course, "Refactoring SQL for Modularity" - if you're curious, pick up the [free refactoring course here](https://courses.getdbt.com/courses/refactoring-sql-for-modularity), which includes example and practice refactoring projects. Or for a more in-depth look at migrating DDL and DML from stored procedures, check out [this guide](/guides/migration/tools/migrating-from-stored-procedures/1-migrating-from-stored-procedures). +::: ## Migrate your existing SQL code @@ -31,19 +32,19 @@ Your goal in this initial step is simply to use dbt to run your existing SQL tra While refactoring you'll be **moving around** a lot of logic, but ideally you won't be **changing** the logic. More changes = more auditing work, so if you come across anything you'd like to fix, try your best to card that up for another task after refactoring! We'll save the bulk of our auditing for the end when we've finalized our legacy-to-dbt model restructuring. -To get going, you'll copy your legacy SQL query into your dbt project, by saving it in a `.sql` file under the `/models` directory of your project.
+To get going, you'll copy your legacy SQL query into your dbt project, by saving it in a `.sql` file under the `/models` directory of your project. Once you've copied it over, you'll want to `dbt run` to execute the query and populate the table in your warehouse. -> If this is your first time running dbt, you may want to start with the [Introduction to dbt](/docs/introduction) and the [Getting Started tutorial](/tutorial/getting-started) before diving into refactoring. +If this is your first time running dbt, you may want to start with the [Introduction to dbt](/docs/introduction) and the earlier sections of the [Getting Started guide](/docs/get-started/getting-started/overview) before diving into refactoring. This step may sound simple, but if you're porting over an existing set of SQL transformations to a new SQL dialect, you will need to consider how your legacy SQL dialect differs from your new SQL flavor, and you may need to modify your legacy code to get it to run at all. -This will commonly happen if you're migrating from a [stored procedure workflow on a legacy database](https://getdbt.com/analytics-engineering/case-for-elt-workflow/) into dbt + a cloud data warehouse. +This will commonly happen if you're migrating from a [stored procedure workflow on a legacy database](https://getdbt.com/analytics-engineering/case-for-elt-workflow/) into dbt + a cloud data warehouse. -Functions that you were using previously may not exist, or their syntax may shift slightly between SQL dialects. +Functions that you were using previously may not exist, or their syntax may shift slightly between SQL dialects. If you're not migrating data warehouses at the moment, then you can keep your SQL syntax the same. You have access to the exact same SQL dialect inside of dbt that you have querying directly from your warehouse. @@ -51,19 +52,19 @@ If you're not migrating data warehouses at the moment, then you can keep your SQ -To query from your data warehouse, we recommend creating [sources in dbt](/docs/building-a-dbt-project/using-sources) rather than querying the database table directly. +To query from your data warehouse, we recommend creating [sources in dbt](/docs/build/sources) rather than querying the database table directly. This allows you to call the same table in multiple places with `{{ source('my_source', 'my_table') }}` rather than `my_database.my_schema.my_table`. We start here for several reasons: #### Source freshness reporting -Using sources unlocks the ability to run [source freshness reporting](/docs/building-a-dbt-project/using-sources#snapshotting-source-data-freshness) to make sure your raw data isn't stale. +Using sources unlocks the ability to run [source freshness reporting](/docs/build/sources#snapshotting-source-data-freshness) to make sure your raw data isn't stale. #### Easy dependency tracing -If you're migrating multiple stored procedures into dbt, with sources you can see which queries depend on the same raw tables. +If you're migrating multiple stored procedures into dbt, with sources you can see which queries depend on the same raw tables. -This allows you to consolidate modeling work on those base tables, rather than calling them separately in multiple places. +This allows you to consolidate modeling work on those base tables, rather than calling them separately in multiple places. @@ -92,7 +93,7 @@ Means that you will work directly on the SQL script that you ported over in the You'll move it into a `/marts` subfolder within your project's `/models` folder and go to town.
-**Pros**: +**Pros**: - You won't have any old models to delete once refactoring is done. **Cons**: @@ -112,18 +113,17 @@ Means that you will copy your model to a `/marts` folder, and work on changes on - You can decide when the old model is ready to be deprecated. **Cons**: -- You'll have the old file(s) in your project until you can deprecate them - running side-by-side like this can feel duplicative, and may be a headache to manage if you're migrating a number of queries in bulk. +- You'll have the old file(s) in your project until you can deprecate them - running side-by-side like this can feel duplicative, and may be a headache to manage if you're migrating a number of queries in bulk. We generally recommend the **alongside** approach, which we'll follow in this tutorial. - ## Implement CTE groupings Once you choose your refactoring strategy, you'll want to do some cosmetic cleanups according to your data modeling best practices and start moving code into CTE groupings. This will give you a head start on porting SQL snippets from CTEs into modular [dbt data models](https://docs.getdbt.com/docs/building-a-dbt-project/building-models). ### What's a CTE? -CTE stands for “Common Table Expression”, which is a temporary result set available for use until the end of SQL script execution. Using the `with` keyword at the top of a query allows us to use CTEs in our code. +CTE stands for “Common Table Expression”, which is a temporary result set available for use until the end of SQL script execution. Using the `with` keyword at the top of a query allows us to use CTEs in our code. Inside of the model we're refactoring, we’re going to use a 4-part layout: 1. 'Import' CTEs @@ -135,7 +135,7 @@ In practice this looks like: ```sql -with +with import_orders as ( @@ -164,7 +164,7 @@ final_cte as ( -- join together logical_cte_1 and logical_cte_2 ) -select * from final_cte +select * from final_cte ``` Notice there are no nested queries here, which makes reading our logic much more straightforward. If a query needs to be nested, it's just a new CTE that references the previous CTE. @@ -177,15 +177,15 @@ Let's start with our components, and identify raw data that is being used in our - jaffle_shop.orders - stripe.payment -Let's make a CTE for each of these under the `Import CTEs` comment. These import CTEs should be only simple `select *` statements, but can have filters if necessary. +Let's make a CTE for each of these under the `Import CTEs` comment. These import CTEs should be only simple `select *` statements, but can have filters if necessary. We'll cover that later - for now, just use `select * from {{ source('schema', 'table') }}` for each, with the appropriate reference. Then, we will switch out all hard-coded references with our import CTE names. #### 2. Logical CTEs -Logical CTEs contain unique transformations used to generate the final product, and we want to separate these into logical blocks. To identify our logical CTEs, we will follow subqueries in order. +Logical CTEs contain unique transformations used to generate the final product, and we want to separate these into logical blocks. To identify our logical CTEs, we will follow subqueries in order. -If a query has nested subqueries, we will want to continue moving down until we get to the first layer, then pull out the subqueries in order as CTEs, making our way back to the final select statement.
+If a query has nested subqueries, we will want to continue moving down until we get to the first layer, then pull out the subqueries in order as CTEs, making our way back to the final select statement. Name these CTEs as the alias that the subquery was given - you can rename it later, but for now it is best to make as few changes as possible. @@ -195,13 +195,13 @@ If the script is particularly complicated, it's worth it to go through once you' The previous process usually results in a select statement that is left over at the end - this select statement can be moved into its own CTE called the final CTE, or can be named something that is intuitive for others to understand. This CTE determines the final product of the model. -#### 4. Simple SELECT statement +#### 4. Simple SELECT statement After you have moved everything into CTEs, you'll want to write a `select * from final` (or something similar, depending on your final CTE name) at the end of the model. This allows anyone after us to easily step through the CTEs when troubleshooting, rather than having to untangle nested queries. -> For more background on CTEs, check out the [dbt Labs style guide](https://github.com/dbt-labs/corp/blob/master/dbt_style_guide.md#ctes). +> For more background on CTEs, check out the [dbt Labs style guide](https://github.com/dbt-labs/corp/blob/main/dbt_style_guide.md#ctes). ## Port CTEs to individual data models Rather than keep our SQL code confined to one long SQL file, we'll now start splitting it into modular + reusable [dbt data models](https://docs.getdbt.com/docs/building-a-dbt-project/building-models). @@ -214,7 +214,7 @@ We'll follow those structures in this walkthrough, but your team's conventions m -To identify our [staging models](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/#staging-models), we want to look at the things we've imported in our import CTEs. +To identify our [staging models](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/#staging-models), we want to look at the things we've imported in our import CTEs. For us, that's customers, orders, and payments. We want to look at the transformations that can occur within each of these sources without needing to be joined to each other, and then we want to make components out of those so they can be our building blocks for further development. @@ -222,9 +222,9 @@ For us, that's customers, orders, and payments. We want to look at the transform -Our left-over logic can then be split into steps that are more easily understandable. +Our left-over logic can then be split into steps that are more easily understandable. -We'll start by using CTEs, but when a model becomes complex or can be divided out into reusable components you may consider an intermediate model. +We'll start by using CTEs, but when a model becomes complex or can be divided out into reusable components, you may consider an intermediate model. Intermediate models are optional and are not always needed, but do help when you have large data flows coming together. @@ -237,7 +237,7 @@ Our final model accomplishes the result set we want, and it uses the components -We'll want to audit our results using the dbt [audit_helper package](https://hub.getdbt.com/fishtown-analytics/audit_helper/latest/). +We'll want to audit our results using the dbt [audit_helper package](https://hub.getdbt.com/dbt-labs/audit_helper/latest/).
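A comparison model built with that package can look something like the sketch below, which uses audit_helper's `compare_relations` macro; the legacy database, schema, and primary key names are placeholders:

```sql
-- Compare the legacy table against our refactored dbt model.
-- "analytics", "legacy", "fct_orders", and "order_id" are placeholder names.
{% set old_relation = adapter.get_relation(
      database="analytics",
      schema="legacy",
      identifier="fct_orders"
) %}

{% set dbt_relation = ref('fct_orders') %}

{{ audit_helper.compare_relations(
    a_relation=old_relation,
    b_relation=dbt_relation,
    primary_key="order_id"
) }}
```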
Under the hood, it generates comparison queries between our before and after states, so that we can compare our original query results to our refactored results to identify differences. @@ -246,4 +246,4 @@ Sure, we could write our own query manually to audit these models, but using the ## Ready for refactoring practice? Head to the free on-demand course, [Refactoring from Procedural SQL to dbt](https://courses.getdbt.com/courses/refactoring-sql-for-modularity) for a more in-depth refactoring example + a practice refactoring problem to test your skills. -Questions on this tutorial or the course? Drop a note in #learn-on-demand in [dbt Community Slack](https://getdbt.com/community). +Questions on this guide or the course? Drop a note in #learn-on-demand in [dbt Community Slack](https://getdbt.com/community). diff --git a/website/docs/tutorial/learning-more/using-jinja.md b/website/docs/docs/get-started/learning-more/using-jinja.md similarity index 96% rename from website/docs/tutorial/learning-more/using-jinja.md rename to website/docs/docs/get-started/learning-more/using-jinja.md index 3acf0e10b4c..0476b6aebc3 100644 --- a/website/docs/tutorial/learning-more/using-jinja.md +++ b/website/docs/docs/get-started/learning-more/using-jinja.md @@ -3,7 +3,7 @@ title: "Using Jinja" id: "using-jinja" --- -In this tutorial, we're going to take a common pattern used in SQL, and then use Jinja to improve our code. +In this guide, we're going to take a common pattern used in SQL, and then use Jinja to improve our code. If you'd like to work through this query, add [this CSV](https://github.com/dbt-labs/jaffle_shop/blob/core-v1.0.0/seeds/raw_payments.csv) to the `seeds/` folder of your dbt project, and then execute `dbt seed`. @@ -16,7 +16,6 @@ Consider a data model in which an `order` can have many `payments`. Each `paymen From an analytics perspective, it's important to know how much of each `order` was paid for with each `payment_method`. In your dbt project, you can create a model, named `order_payment_method_amounts`, with the following SQL: - ```sql @@ -151,7 +150,7 @@ group by 1 Getting whitespace control right is often a lot of trial and error! We recommend that you prioritize the readability of your model code over the readability of the compiled code, and only do this as an extra polish. ## Use a macro to return payment methods -Here, we've hardcoded the list of payment methods in our model. We may need to access this list from another model. A good solution here is to use a [variable](using-variables), but for the purpose of this tutorial, we're going to instead use a macro! +Here, we've hardcoded the list of payment methods in our model. We may need to access this list from another model. A good solution here is to use a [variable](/docs/build/project-variables), but for the purpose of this tutorial, we're going to instead use a macro! [Macros](jinja-macros#macros) in Jinja are pieces of code that can be called multiple times – they are analogous to a function in Python, and are extremely useful if you find yourself repeating code across multiple models. @@ -312,7 +311,7 @@ Macros let analysts bring software engineering principles to the SQL they write. A number of useful dbt macros have already been written in the [dbt-utils package](https://github.com/dbt-labs/dbt-utils). 
For example, the [get_column_values](https://github.com/dbt-labs/dbt-utils#get_column_values-source) macro from dbt-utils could be used instead of the `get_column_values` macro we wrote ourselves (saving us a lot of time, but at least we learned something along the way!). -Install the [dbt-utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/) package in your project (docs [here](package-management)), and then update your model to use the macro from the package instead: +Install the [dbt-utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/) package in your project (docs [here](/docs/build/packages)), and then update your model to use the macro from the package instead: diff --git a/website/docs/dbt-cli/install/pip.md b/website/docs/docs/get-started/pip-install.md similarity index 67% rename from website/docs/dbt-cli/install/pip.md rename to website/docs/docs/get-started/pip-install.md index f6e1d378e72..5075a3e5086 100644 --- a/website/docs/dbt-cli/install/pip.md +++ b/website/docs/docs/get-started/pip-install.md @@ -1,17 +1,17 @@ --- -title: "Use pip to install dbt" +title: "Install with pip" description: "You can use pip to install dbt Core and adapter plugins from the command line." --- -You need to use `pip` to install dbt Core on Windows or Linux operating systems. You should use [Homebrew](install/homebrew) for installing dbt Core on a MacOS. +You need to use `pip` to install dbt Core on Windows or Linux operating systems. You should use [Homebrew](/docs/get-started/homebrew-install) for installing dbt Core on macOS. -You can install dbt Core and plugins using `pip` because they are Python modules distributed on [PyPi](https://pypi.org/project/dbt/). We recommend using virtual environments when installing with `pip`. +You can install dbt Core and plugins using `pip` because they are Python modules distributed on [PyPI](https://pypi.org/project/dbt/). We recommend using virtual environments when installing with `pip`. - - - + + + -Once you know [which adapter](available-adapters) you're using, you can install it as `dbt-<adapter>`. For instance, if using Postgres: +Once you know [which adapter](supported-data-platforms) you're using, you can install it as `dbt-<adapter>`. For example, if using Postgres: ```shell pip install dbt-postgres @@ -42,7 +42,7 @@ pip install --upgrade dbt-<adapter> ### Install dbt-core only -If you're building a tool that integrates with dbt Core, you may want to install the core library alone, without a database adapter. Note that you won't be able to use dbt as a CLI tool. Also, be advised that dbt Core's [Python API is currently unstable and undocumented](dbt-api). +If you're building a tool that integrates with dbt Core, you may want to install the core library alone, without a database adapter. Note that you won't be able to use dbt as a CLI tool. ```shell pip install dbt-core @@ -50,7 +50,7 @@ pip install dbt-core ### `pip install dbt` -Note that, as of v1.0.0, `pip install dbt` is no longer supported and will raise an explicit error. Since v0.13, the PyPi package named `dbt` was a simple "pass-through" of `dbt-core` and the four original database adapter plugins. For v1, we formalized that split. +Note that, as of v1.0.0, `pip install dbt` is no longer supported and will raise an explicit error. Since v0.13, the PyPI package named `dbt` was a simple "pass-through" of `dbt-core` and the four original database adapter plugins. For v1, we formalized that split.
If you have workflows or integrations that relied on installing the package named `dbt`, you can achieve the same behavior going forward by installing the same five packages that it used: diff --git a/website/docs/docs/get-started/run-your-dbt-projects.md b/website/docs/docs/get-started/run-your-dbt-projects.md new file mode 100644 index 00000000000..ca7dd0b1757 --- /dev/null +++ b/website/docs/docs/get-started/run-your-dbt-projects.md @@ -0,0 +1,24 @@ +--- +title: "Run your dbt projects" +id: "run-your-dbt-projects" +--- +You can run your dbt projects with [dbt Cloud](/docs/get-started/dbt-cloud-features) and [dbt Core](https://github.com/dbt-labs/dbt-core). dbt Cloud is a hosted application where you can develop directly from a web browser. dbt Core is an open source project where you can develop from the command line. + +Among other features, dbt Cloud provides a development environment to help you build, test, run, and [version control](/docs/collaborate/git-version-control) your project faster. It also includes an easier way to share your [dbt project's documentation](/docs/collaborate/build-and-view-your-docs) with your team. These development tasks are directly built into dbt Cloud for an _integrated development environment_ (IDE). Refer to [Develop in the Cloud](/docs/get-started/develop-in-the-cloud) for more details. + +With dbt Core, you can run your dbt projects from the command line. The command line interface (CLI) is available from your computer's terminal application such as Terminal or iTerm. When using the command line, you can run commands and do other work from the current working directory on your computer. Before running the dbt project from the command line, make sure you are working in your dbt project directory. Learning terminal commands such as `cd` (change directory), `ls` (list directory contents), and `pwd` (print working directory) can help you navigate the directory structure on your system. + +When running your project from dbt Core or dbt Cloud, the commands you commonly use are: + +- [dbt run](/reference/commands/run) — Runs the models you defined in your project +- [dbt build](/reference/commands/build) — Builds and tests your selected resources such as models, seeds, snapshots, and tests +- [dbt test](/reference/commands/test) — Executes the tests you defined for your project + +For information on all dbt commands and their arguments (flags), see the [dbt command reference](/reference/dbt-commands). If you want to list all dbt commands from the command line, run `dbt --help`. To list a dbt command’s specific arguments, run `dbt COMMAND_NAME --help`.
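Putting those commands together, a typical session from the command line might look like the following sketch. The project directory name is a placeholder, and `my_model+` uses dbt's node selection syntax to pick a model plus everything downstream of it:

```shell
# Work from inside your dbt project directory ("jaffle_shop" is a placeholder)
cd jaffle_shop

# Run every model in the project
dbt run

# Build and test one model and all of its downstream dependents
dbt build --select my_model+

# Execute only the tests defined in the project
dbt test

# List the flags a specific command accepts
dbt run --help
```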
+ +## Related docs + +- [How we set up our computers for working on dbt projects](https://discourse.getdbt.com/t/how-we-set-up-our-computers-for-working-on-dbt-projects/243) +- [Model selection syntax](/reference/node-selection/syntax) +- [Cloud IDE features](/docs/get-started/dbt-cloud-features#ide-features) +- [Does dbt offer extract and load functionality?](/faqs/Project/transformation-tool) diff --git a/website/docs/dbt-cli/install/from-source.md b/website/docs/docs/get-started/source-install.md similarity index 82% rename from website/docs/dbt-cli/install/from-source.md rename to website/docs/docs/get-started/source-install.md index 782461dfb46..6714e88cd10 100644 --- a/website/docs/dbt-cli/install/from-source.md +++ b/website/docs/docs/get-started/source-install.md @@ -1,14 +1,13 @@ --- -title: "Install dbt from source" -id: "from-source" +title: "Install from source" description: "You can install dbt Core from its GitHub code source." --- -dbt Core and almost all of its adapter plugins are open source software. As such, the codebases are freely available to download and build from source. You might install form source if you want the latest code or want to install dbt from a specific commit. This might be helpful when you are contributing changes, or if you want to debug a past change. +dbt Core and almost all of its adapter plugins are open source software. As such, the codebases are freely available to download and build from source. You might install from source if you want the latest code or want to install dbt from a specific commit. This might be helpful when you are contributing changes, or if you want to debug a past change. -To download form source, you would clone the repositories from GitHub, making a local copy, and then install the local version using `pip`. +To download from source, you would clone the repositories from GitHub, making a local copy, and then install the local version using `pip`. -Downloading and building dbt Core will enable you to contribute to the project by fixing a bug or implementing a sought-after feature. For more details, read the [contributing guidelines](https://github.com/dbt-labs/dbt/blob/HEAD/CONTRIBUTING.md). +Downloading and building dbt Core will enable you to contribute to the project by fixing a bug or implementing a sought-after feature. For more details, read the [contributing guidelines](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md). ### Installing dbt Core @@ -36,6 +35,6 @@ You do _not_ need to install `dbt-core` before installing an adapter plugin -- t To install in editable mode, such as while contributing, use `pip install -e .` instead. - - - + + + diff --git a/website/docs/docs/guides/navigating-the-docs.md b/website/docs/docs/guides/navigating-the-docs.md deleted file mode 100644 index e65a439b7b3..00000000000 --- a/website/docs/docs/guides/navigating-the-docs.md +++ /dev/null @@ -1,66 +0,0 @@ ---- -title: "Navigating docs.getdbt.com and other resources" ---- - -We invest heavily and these docs, as well as other mediums to help you level-up as an analytics engineer. Since there are so many different mediums, it's useful to outline how these pieces fit together. - -## Understanding the role of each medium -### docs.getdbt.com: Docs section -Understanding oriented - -Introduce the core concepts of dbt, with examples. Best for new dbt users that want to understand what a particular feature is and how to use it. 
- -### docs.getdbt.com: Reference section -Information oriented - -The technical reference for dbt configurations. These docs assume that you have a basic understanding of key concepts. Best for dbt users that already know what a particular feature is, and want to see the exact usage docs. These docs contains advanced examples. - -**If you're an advanced dbt user, you'll spend most of your time here.** - -### docs.getdbt.com: Tutorial section -Learning oriented - -Provide an way for a new dbt user to get started with dbt - -### docs.getdbt.com: FAQs -Problem oriented - -Easily indexed common questions that link back to relevant guides and reference docs. Mainly for questions that we can anticipate. - -### Stack Overflow -_[Link](https://stackoverflow.com/search?q=dbt)_ - -Troubleshooting oriented - -Specifically, “I’m stuck and don’t know what to do”. We use Stack Overflow to answer these questions since: -- Stack Overflow has functionality to "upvote" answers and mark questions as resolved. -- Questions in Stack Overflow are indexed by search engines - - -### Discourse -_[discourse.getdbt.com](https://discourse.getdbt.com/)_ - -Tactic/use-case oriented - -How analytics engineers use dbt to solve their tactical problems, e.g.: -* Version controlling UDFs -* Writing a custom schema test for not null -* Snowflake shares + dbt -* Permission schemes in a data warehouse - -Usually these are write-ups where there is no one perfect answer (unlike the “I’m stuck” questions on Stack Overflow), instead, you might need to dig into the “why” or discuss tradeoffs of your approach in these articles. - - -### dbt Blog -_[blog.getdbt.com](https://blog.getdbt.com/)_ - -Strategy oriented - -Bigger picture approaches, where the content is relevant to most data practitioners, not just for dbt users - -### Slack -_Sign up at [community.getdbt.com](https://community.getdbt.com/)_ - -Community oriented - -Create connections with other analytics engineers. Discuss ideas that require opinions, or push the boundaries of what has been done before. diff --git a/website/docs/docs/introduction.md b/website/docs/docs/introduction.md index 82059861323..c54eca1918e 100644 --- a/website/docs/docs/introduction.md +++ b/website/docs/docs/introduction.md @@ -3,188 +3,65 @@ title: "What is dbt?" id: "introduction" --- -dbt (data build tool) enables analytics engineers to transform data in their warehouses by simply writing select statements. dbt handles turning these select statements into tables and views. +## About dbt -dbt does the `T` in (Extract, Load, Transform) processes – it doesn’t extract or load data, but it’s extremely good at transforming data that’s already loaded into your warehouse. + -The role of dbt within a modern data stack is discussed in more detail [here](https://blog.fishtownanalytics.com/what-exactly-is-dbt-47ba57309068). +dbt compiles and runs your analytics code against your data platform, enabling you and your team to collaborate on a single source of truth for metrics, insights, and business definitions. This single source of truth, combined with the ability to define tests for your data, reduces errors when logic changes, and alerts you when issues arise. -dbt also enables analysts to work more like software engineers, in line with the dbt [Viewpoint](viewpoint). +Read more about why we want to enable analysts to work more like software engineers in [The dbt Viewpoint](/community/resources/viewpoint). -## How do I use dbt? 
+## dbt optimizes your workflow -### dbt Projects -A dbt project is a directory of `.sql` and .`yml` files. The directory must contain at a minimum: -* Models: A model is a single `.sql` file. Each model contains a single `select` statement that either transforms raw data into a dataset that is ready for analytics, or, more often, is an intermediate step in such a transformation. -* A project file: a `dbt_project.yml` file which configures and defines your dbt project. +- Avoid writing boilerplate DML and DDL by managing transactions, dropping tables, and managing schema changes. Write business logic with just a SQL `select` statement, or a Python DataFrame, that returns the dataset you need, and dbt takes care of materialization. +- Build up reusable, or modular, data models that can be referenced in subsequent work instead of starting at the raw data with every analysis. +- Dramatically reduce the time your queries take to run: Leverage metadata to find long-running models that you want to optimize and use [incremental models](/docs/build/incremental-models) which dbt makes easy to configure and use. +- Write DRYer code by leveraging [macros](/docs/build/jinja-macros), [hooks](/docs/build/hooks-operations), and [package management](/docs/build/packages). -Projects typically contain a number of other resources as well, including tests, snapshots, and seed files (see [below](#what-else-can-dbt-do)). +## dbt provides more reliable analysis -Whether you are starting a project from scratch, or if your organization already has a dbt project, see the docs on [projects](projects) for more information on getting started. +- No longer copy and paste SQL, which can lead to errors when logic changes. Instead, build reusable data models that get pulled into subsequent models and analysis. Change a model once and that change will propagate to all its dependencies. +- Publish the canonical version of a particular data model, encapsulating all complex business logic. All analysis on top of this model will incorporate the same business logic without needing to reimplement it. +- Use mature source control processes like branching, pull requests, and code reviews. +- Write data quality tests quickly and easily on the underlying data. Many analytic errors are caused by edge cases in the data: testing helps analysts find and handle those edge cases. -:::info Check out our sample dbt project +## dbt products -Want to check out a sample project? Have a look at our [Jaffle Shop](https://github.com/dbt-labs/jaffle_shop) project on GitHub! +You can access dbt using dbt Core or dbt Cloud. dbt Cloud is built around dbt Core, but it also provides: -::: +- Web-based UI so it’s more accessible +- Hosted environment so it’s faster to get up and running +- Differentiated features, such as metadata, in-app job scheduler, observability, integrations with other tools, integrated development environment (IDE), and more. -### Database Connections -dbt connects to your data warehouse to run data transformation queries. As such, you’ll need a data warehouse with source data loaded in it to use dbt. dbt natively supports connections to Snowflake, BigQuery, Redshift and Postgres data warehouses, and there’s a number of community-supported adapters for other warehouses (see [docs](available-adapters)). +You can learn about plans and pricing on [www.getdbt.com](https://www.getdbt.com/pricing/). -When you define your connection, you’ll also be able to specify the target schema where dbt should create your models as tables and views.
See [Managing environments](managing-environments) for more information on picking target schema names. +### dbt Cloud +dbt Cloud is the fastest and most reliable way to deploy dbt. Develop, test, schedule, and investigate data models all in one web-based UI. Read more about [Getting started with dbt Cloud](/docs/get-started/getting-started/set-up-dbt-cloud) and [dbt Cloud features](/docs/get-started/dbt-cloud-features). -### dbt Commands -A command is an instruction to execute dbt which compiles and runs SQL against the database. +### dbt Core -When you issue a dbt command, such as `run`, dbt: +dbt Core is an open-source tool that enables data teams to transform data using analytics engineering best practices. You can install and use dbt Core on the command line. Read more about [Getting started with dbt Core](/docs/get-started/getting-started-dbt-core). -1. Determines the order to execute the models in your project in. -2. Generates the required SQL to build the model, as per the model's *materialization* -3. Executes the compiled queries against your data warehouse, using the credentials specified in the *target* defined in your *profile*. Executing these queries creates relations in the target schema in your data warehouse. These relations contain transformed data, ready for analysis. +## The power of dbt -A list of commands can be found in the [Command reference](dbt-commands) section of these docs. - -## What does the workflow for using dbt look like? -There’s two main ways of working with dbt -- using the web-based Integrated Development Environment (IDE) in dbt Cloud, or using the Command Line Interface (CLI). - -### Developing in dbt Cloud -To get started with a project and connection, follow the onboarding flow. Use the web editor to build your project and execute dbt commands. If you don't have one already, you can sign up for a [free account](https://www.getdbt.com/signup). - -### Developing locally with the Command Line Interface (CLI) -To use the CLI: - -1. Follow [these instructions](dbt-cli/install/overview) to install the dbt CLI -2. [Set up a profile](configure-your-profile) to connect to your data warehouse -3. Build your dbt project in a code editor, like Atom or VSCode -4. Execute commands using your terminal - -:::info Developing locally? - -If you’re developing your dbt project locally, we recommend checking out [this article](https://discourse.getdbt.com/t/how-we-set-up-our-computers-for-working-on-dbt-projects/243) to understand how we set up our computers. - -::: - -## What makes dbt so powerful? As a dbt user, your main focus will be on writing models (i.e. select queries) that reflect core business logic – there’s no need to write boilerplate code to create tables and views, or to define the order of execution of your models. Instead, dbt handles turning these models into objects in your warehouse for you. -**dbt handles boilerplate code to materialize queries as relations.** -For each model you create, you can easily configure a *materialization*. - -A materialization represents a build strategy for your select query – the code behind a materialization is robust, boilerplate SQL that wraps your select query in a statement to create a new, or update an existing, relation. - -dbt ships with the following built-in materializations: -* `view` (default): The model is built as a view in the database. -* `table`: The model is built as a table in the database.
-* `ephemeral`: The model is not directly built in the database, but is instead pulled into dependent models as common table expressions. -* `incremental`: The model is initially built as a table, and in subsequent runs, dbt inserts new rows and updates changed rows in the table. - -Custom materializations can also be built if required. - -**dbt determines the order of model execution.** -Often when transforming data, it makes sense to do so in a staged approach. dbt provides a mechanism to implement transformations in stages through the [ref](ref) function. - -Rather than selecting from existing tables and views in your warehouse, you can select from _another model_, like so: - - - -```sql -select - orders.id, - orders.status, - sum(case when payments.payment_method = 'bank_transfer' then payments.amount else 0 end) as bank_transfer_amount, - sum(case when payments.payment_method = 'credit_card' then payments.amount else 0 end) as credit_card_amount, - sum(case when payments.payment_method = 'gift_card' then payments.amount else 0 end) as gift_card_amount, - sum(amount) as total_amount - -from {{ ref('base_orders') }} as orders -left join {{ ref('base_payments') }} as payments on payments.order_id = orders.id -``` - - - -When compiled to executable SQL, dbt will replace the model specified in the `ref` function with the relation name. - -Importantly, dbt also uses the `ref` function to determine the sequence in which to execute the models – in the above example, `base_orders` and `base_payments` need to be built prior to building the `orders` model. - - - -dbt builds a directed acyclic graph (DAG) based on the interdependencies between models – each node of the graph represents a model, and edges between the nodes are defined by `ref` functions, where a model specified in a `ref` function is recognized as a predecessor of the current model. - -When dbt runs, models are executed in the order specified by the DAG – there’s no need to explicitly define the order of execution of your models. Building models in staged transformations also reduces the need to repeat SQL, as a single transformation (for example, renaming a column) can be shared as a predecessor for a number of downstream models. - -For more information see [Ref](ref). - -:::info Want to see a DAG visualization for your project? - -Check out the [Documentation Website](documentation) docs - -::: - -## What else can dbt do? -dbt has a number of additional features that make it even more powerful, including: - - -**Code compiler:** -In dbt, SQL files can contain Jinja, a lightweight templating language. Using Jinja in SQL provides a way to use control structures (e.g. `if` statements and `for` loops) in your queries. It also enables repeated SQL to be shared through `macros`. - -The power of using Jinja in your queries is discussed in [Using Jinja](using-jinja). - - -**Documentation:** -dbt provides a mechanism to write, version-control, and share documentation for your dbt models. Descriptions (in plain text, or markdown) can be written for each model and field. - -These descriptions, along with additional implicit information (for example, the model lineage, or the field data type and tests applied), can be generated as a website and shared with your wider team, providing an easily referenceable databook for anyone that interacts with dbt models. - -For more information see [Documentation](documentation). - -**Tests:** -SQL can be difficult to test, since the underlying data is frequently changing.
dbt provides a way to improve the integrity of the SQL in each model by making assertions about the results generated by a model. Out of the box, you can test whether a specified column in a model only contains: - -* Non-null values -* Unique values -* Values that have a corresponding value in another model (e.g. a `customer_id` for an `order` corresponds to an `id` in the `customers` model) -* Values from a specified list - -Tests can be easily extended to suit business logic specific to your organization – any assertion that you can make about your model in the form of a select query can be turned into a test. - -To learn more about writing tests for your models, see [Testing](building-a-dbt-project/tests). - - -**Package management:** -dbt ships with a package manager, which allows analysts to use and publish both public and private repositories of dbt code which can then be referenced by others. - -This means analysts can leverage libraries that provide commonly-used macros like [dbt_utils](https://github.com/dbt-labs/dbt-utils), or dataset-specific projects for software services like [Snowplow](https://github.com/dbt-labs/snowplow) and [Stripe](https://github.com/dbt-labs/stripe), to hit the ground running. - -For more information, see [Package Management](package-management). - - -**Seed file loader:** -Often in analytics, raw values need to be mapped to a more readable value (e.g. converting a country-code to a country name) or enriched with static, or infrequently changing data (e.g. using revenue targets set each year to assess your actuals). - -These data sources, known as seed files, can be saved as a CSV file in your `project` and loaded into your data warehouse through use of the `seed` command. - -The documentation for the seed command can be found [here](seeds). - - -**Data snapshots:** -Often, records in a data source are mutable, in that they change over time. This can be difficult to handle in analytics if you want to reconstruct historic values. - -dbt provides a mechanism to snapshot raw data for a point in time, through use of [snapshots](snapshots). - - -**Understand raw data sources:** -Since dbt selects from raw data already loaded in your data warehouse, it's useful to understand the role of these tables and schemas within your warehouse. Defining raw data as a Source in dbt allows you to: -* Document and test the data that is loaded into your warehouse -* Check the freshness of your data against specified limits, to help identify upstream issues. -* Understand which models depend on each data source. - -Check out the [documentation on Sources](using-sources). - - -## Who should use dbt? -dbt is appropriate for anyone who interacts with a data warehouse. It can be used by data engineers, data analysts and data scientists, or anyone that knows how to write select queries in SQL. - -For dbt users that are new to programming, you may also need to spend some time getting to know the basics of the command line, and familiarizing yourself with git. - -To make full use of dbt, it may also be beneficial to know some programming basics, such as `for` loops and `if` statements, to use Jinja effectively in your models. +| Feature | Description | +|-----------------------|-------------| +| Handle boilerplate code to materialize queries as relations | For each model you create, you can easily configure a *materialization*. 
A materialization represents a build strategy for your select query – the code behind a materialization is robust, boilerplate SQL that wraps your select query in a statement to create a new, or update an existing, relation. Read more about [Materializations](/docs/build/materializations).| +| Use a code compiler | SQL files can contain Jinja, a lightweight templating language. Using Jinja in SQL provides a way to use control structures in your queries. For example, `if` statements and `for` loops. It also enables repeated SQL to be shared through `macros`. Read more about [Macros](/docs/build/jinja-macros).| +| Determine the order of model execution | Often, when transforming data, it makes sense to do so in a staged approach. dbt provides a mechanism to implement transformations in stages through the [ref function](/reference/dbt-jinja-functions/ref). Rather than selecting from existing tables and views in your warehouse, you can select from another model.| +| Document your dbt project | dbt provides a mechanism to write, version-control, and share documentation for your dbt models. You can write descriptions (in plain text or markdown) for each model and field. In dbt Cloud, you can auto-generate the documentation when your dbt project runs. Read more about the [Documentation](/docs/collaborate/documentation).| +| Test your models | Tests provide a way to improve the integrity of the SQL in each model by making assertions about the results generated by a model. Read more about writing tests for your models in [Testing](/docs/build/tests).| +| Manage packages | dbt ships with a package manager, which allows analysts to use and publish both public and private repositories of dbt code which can then be referenced by others. Read more about [Package Management](/docs/build/packages). | +| Load seed files | Often in analytics, raw values need to be mapped to a more readable value (for example, converting a country-code to a country name) or enriched with static or infrequently changing data. These data sources, known as seed files, can be saved as a CSV file in your `project` and loaded into your data warehouse using the `seed` command. Read more about [Seeds](/docs/build/seeds).| +| Snapshot data | Often, records in a data source are mutable, in that they change over time. This can be difficult to handle in analytics if you want to reconstruct historic values. dbt provides a mechanism to snapshot raw data for a point in time, through use of [snapshots](/docs/build/snapshots).| + +### Related docs + +- [Getting started with dbt Cloud](/docs/get-started/getting-started/set-up-dbt-cloud) +- [Getting started with dbt Core](/docs/get-started/getting-started-dbt-core) +- [Best practice guides](/guides/best-practices) +- [What is a dbt Project?](/docs/build/projects) +- [dbt run](/docs/get-started/run-your-dbt-projects) diff --git a/website/docs/docs/running-a-dbt-project/dbt-api.md b/website/docs/docs/running-a-dbt-project/dbt-api.md deleted file mode 100644 index 763ae0d7c41..00000000000 --- a/website/docs/docs/running-a-dbt-project/dbt-api.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -title: "Using the Python API" -id: "dbt-api" ---- - -The primary interface into `dbt-core` is on the command line. It is designed to be invoked with commands, arguments, and flags. Starting in v1, this interface is contracted, with backwards compatibility guaranteed. - -It _is_ possible to import and invoke dbt as a Python module.
This API is still not contracted or documented, and it is liable to change in future versions of `dbt-core` without warning. Please use caution when upgrading across versions of dbt if you choose to run dbt in this manner! - -We aim to contract and document an increasing number of Python interfaces within `dbt-core`. Today, those interfaces are: -- [Adapter plugin](building-a-new-adapter) classes and methods. These are liable to change in minor versions _only_, and we will aim for backwards compatibility whenever possible. -- [Events](events-logging), Python objects that dbt emits as log messages. diff --git a/website/docs/docs/running-a-dbt-project/profile.md b/website/docs/docs/running-a-dbt-project/profile.md deleted file mode 100644 index 66dc1fa3cca..00000000000 --- a/website/docs/docs/running-a-dbt-project/profile.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -title: "Profile" -id: "profile" ---- - - -:::danger This page has been deprecated - -Check out [Connecting to your warehouse with the CLI](configure-your-profile) for the newer documentation. - -::: diff --git a/website/docs/docs/running-a-dbt-project/using-the-dbt-ide.md b/website/docs/docs/running-a-dbt-project/using-the-dbt-ide.md index 323947a933e..61e93ab031f 100644 --- a/website/docs/docs/running-a-dbt-project/using-the-dbt-ide.md +++ b/website/docs/docs/running-a-dbt-project/using-the-dbt-ide.md @@ -3,24 +3,24 @@ title: "Using the dbt IDE" id: "using-the-dbt-ide" --- -The dbt Integrated Development Environment (IDE) is a single interface for building, testing, running, and version controlling dbt projects. We encourage you to use the IDE and share your feedback with the dbt Cloud team. +The dbt Integrated Development Environment (IDE) is a single interface for building, testing, running, and version controlling dbt projects. ### Requirements -- The dbt IDE is powered by the [dbt Server](rpc) which has been overhauled in dbt v0.15.0. In order to use the IDE, your dbt project must be compatible with dbt v0.15.0. -- To use the IDE, you must have a [Developer License](cloud-seats-and-users). -- Write access must be enabled for your dbt repository in dbt Cloud. See [Connecting your GitHub Account](cloud-installing-the-github-application) and [Importing a project by git URL](cloud-import-a-project-by-git-url) for detailed setup instructions. +- The dbt IDE is powered by [dbt-rpc](/reference/commands/rpc) which has been overhauled in dbt v0.15.0. In order to use the IDE, your dbt project must be compatible with dbt v0.15.0. +- To use the IDE, you must have a [Developer License](/docs/collaborate/manage-access/seats-and-users). +- Write access must be enabled for your dbt repository in dbt Cloud. See [Connecting your GitHub Account](/docs/collaborate/git/connect-github) and [Importing a project by git URL](/docs/collaborate/git/import-a-project-by-git-url) for detailed setup instructions. ### Creating a development environment New dbt Cloud accounts will automatically be created with a Development Environment for the project created during setup. If you have an existing dbt Cloud account, you may need to create a Development Environment manually to use the dbt IDE. -To create a Development Environment, navigate to the Environments page for your Project and click the "New Environment" button. +To create a development environment, choose **Deploy** > **Environments** from the top left. Then, click **Create Environment**. - + -To create a Development Environment, change the environment "type" to "development". 
Enter a name like "Dev" for the environment to help identify it amongst your other environments. Finally, click "Save" to finish creating your Development Environment. +Enter an environment **Name** that would help you identify it among your other environments (for example, `Nate's Development Environment`). Choose **Development** as the **Environment Type**. You can also select which **dbt Version** to use at this time. For compatibility reasons, we recommend that you select the same dbt version that you plan to use in your deployment environment. Finally, click **Save** to finish creating your development environment. - + ### Setting up developer credentials @@ -28,11 +28,11 @@ The IDE uses *developer credentials* to connect to your database. These develope New dbt Cloud accounts should have developer credentials created automatically as a part of Project creation in the initial application setup. - + -Existing accounts *may not* have development credentials configured already. To manage your development credentials, navigate to your [Profile](https://cloud.getdbt.com/#/profile/) in dbt Cloud and find the relevant Project under the **Credentials** header. After entering your developer credentials, you'll be able to access the dbt IDE. +New users on existing accounts *might not* have their development credentials already configured. To manage your development credentials, go to the [Credentials](https://cloud.getdbt.com/next/settings/profile#credentials) section. Select the relevant project in the list. After entering your developer credentials, you'll be able to access the dbt IDE. - + ### Compiling and running SQL diff --git a/website/docs/docs/supported-data-platforms.md b/website/docs/docs/supported-data-platforms.md new file mode 100644 index 00000000000..c7057eb4766 --- /dev/null +++ b/website/docs/docs/supported-data-platforms.md @@ -0,0 +1,86 @@ +--- +title: "Supported data platforms" +id: "supported-data-platforms" +--- + +dbt connects to and runs SQL against your database, warehouse, lake, or query engine. We group all of these SQL-speaking things into one bucket called _data platforms_. dbt can be extended to any data platform using a dedicated _adapter plugin_. These plugins are built as Python modules that dbt Core discovers if they are installed on your system. All the adapters listed below are open source and free to use, just like dbt Core. + +To learn more about adapters, check out [What Are Adapters](/guides/advanced/adapter-development/1-what-are-adapters). 
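Installing the adapter is only half of the setup; dbt Core also needs connection details for the data platform itself, which it reads from a `profiles.yml` file. Below is a minimal sketch assuming the `dbt-postgres` adapter, where every host, credential, and schema value is a placeholder:

```yaml
# ~/.dbt/profiles.yml — the profile name must match `profile` in dbt_project.yml
my_project:
  target: dev
  outputs:
    dev:
      type: postgres            # selects the installed adapter plugin
      host: localhost
      port: 5432
      user: dbt_user
      password: "{{ env_var('DBT_PASSWORD') }}"  # keep secrets out of the file
      dbname: analytics
      schema: dbt_dev
      threads: 4
```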
+ +## Supported Data Platforms + +### Verified Adapters + +| Data Platform (click to view setup guide) | latest verified version | +| ----------------------------------------- | ------------------------ | +| [AlloyDB](alloydb-setup) | (same as `dbt-postgres`) | +| [Azure Synapse](azuresynapse-setup) | 1.3.0 :construction: | +| [BigQuery](bigquery-setup) | 1.2.0 | +| [Databricks](databricks-setup) | 1.2.0 :construction: | +| [Dremio](dremio-setup) | 1.3.0 :construction: | +| [Postgres](postgres-setup) | 1.2.0 | +| [Redshift](redshift-setup) | 1.2.0 | +| [Snowflake](snowflake-setup) | 1.2.0 | +| [Spark](spark-setup) | 1.2.0 | +| [Starburst & Trino](trino-setup) | 1.2.0 :construction: | +:construction:: Verification in progress + +### Community Adapters + +| Data Platforms (click to view setup guide) | | | +| ----------------------------------------------- | --------------------------------| ---------------------------------| +| [Athena](athena-setup) | [Hive](hive-setup) | [SingleStore](singlestore-setup) | +| [Clickhouse](clickhouse-setup) | [Impala](impala-setup) | [SQLite](sqlite-setup) | +| [IBM DB2](ibmdb2-setup) | [iomete](iomete-setup) | [SQL Server & Azure SQL](mssql-setup) | +| [DuckDB](duckdb-setup) | [Layer](layer-setup) | [Azure Synapse](azuresynapse-setup) | +| [Dremio](dremio-setup) | [Materialize](materialize-setup) | [Teradata](teradata-setup)| +| [Exasol Analytics](exasol-setup) | [MindsDB](mindsdb-setup) | [TiDB](tidb-setup)| +| [Firebolt](firebolt-setup) | [MySQL](mysql-setup) | [Vertica](vertica-setup)| +| [AWS Glue](glue-setup) | [Oracle](oracle-setup) | +| [Greenplum](greenplum-setup) | [Rockset](rockset-setup) + + +## Adapter Installation + +With a few exceptions [^1], all adapters listed below can be installed from PyPI using `pip install <adapter-name>`. The installation will include `dbt-core` and any other required dependencies, which may include both other dependencies and even other adapter plugins. Read more about [installing dbt](/docs/get-started/installation). + +## Adapter Taxonomy + +### Verified by dbt Labs + +In order to provide a more consistent and reliable experience, dbt Labs has a rigorous process by which we verify adapter plugins. The process covers aspects of development, documentation, user experience, and maintenance. These adapters earn a **Verified** designation so that users can have a certain level of trust and expectation when they use them. To learn more, see [Verifying a new adapter](/guides/advanced/adapter-development/7-verifying-a-new-adapter). + +We also welcome and encourage adapter plugins from the dbt community (see the below [Contributing to a pre-existing adapter](#contributing-to-a-pre-existing-adapter)). Please be mindful that these community maintainers are intrepid volunteers who donate their time and effort — so be kind, understanding, and help out where you can! + +### Maintainers + +Who made and maintains an adapter is certainly relevant, but we recommend using an adapter's verification status to determine the quality and health of an adapter. So far there are three categories of maintainers: + +| Supported by | Maintained By | +| ------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| dbt Labs | dbt Labs maintains a set of adapter plugins for some of the most common databases, warehouses, and platforms.
As for why particular data platforms were chosen, see ["Why Verify an Adapter"](7-verifying-a-new-adapter#why-verify-an-adapter) | +| Partner | These adapter plugins are built and maintained by the same people who build and maintain the complementary data technology. | +| Community | These adapter plugins are contributed and maintained by members of the community. 🌱 | + + +## Contributing to dbt-core adapters + +### Contributing to a pre-existing adapter + +Community-supported plugins are works in progress, and anyone is welcome to contribute by testing and writing code. If you're interested in contributing: + +- Join both the dedicated channel, [#adapter-ecosystem](https://getdbt.slack.com/archives/C030A0UF5LM), in [dbt Slack](https://community.getdbt.com/) and the channel for your adapter's data store (see **Slack Channel** column of above tables) +- Check out the open issues in the plugin's source repository (follow relevant link in **Adapter Repository** column of above tables) + +### Creating a new adapter + +If you see something missing from the lists above, and you're interested in developing an integration, read more about adapters and how they're developed in the [Adapter Development](/guides/advanced/adapter-development/1-what-are-adapters) section. + +If you have a new adapter, please add it to this list using a pull request! See [Documenting your adapter](5-documenting-a-new-adapter) for more information. + +[^1]: Here are the two different adapters. Use the PyPI package name when installing with `pip` + + | Adapter repo name | PyPI package name | + | ----------------- | -------------------- | + | `dbt-athena` | `dbt-athena-adapter` | + | `dbt-layer` | `dbt-layer-bigquery` | diff --git a/website/docs/docs/use-dbt-semantic-layer/avail-sl-integrations.md b/website/docs/docs/use-dbt-semantic-layer/avail-sl-integrations.md new file mode 100644 index 00000000000..9abd608300e --- /dev/null +++ b/website/docs/docs/use-dbt-semantic-layer/avail-sl-integrations.md @@ -0,0 +1,28 @@ +--- +title: "Available integrations" +id: avail-sl-integrations +description: "Review a wide range of partners you can integrate and query with the dbt Semantic Layer." +sidebar_label: "Available integrations" +--- + +# Available integrations + +A wide variety of data applications across the modern data stack natively integrate with the dbt Semantic Layer and dbt metrics — from Business Intelligence tools to notebooks, data catalogs, and more. + +There are two categories of partner integrations: + +- **dbt Semantic Layer partners** - Semantic Layer integrations are capable of interactively querying dbt metrics, surfacing the underlying data in partner tools. This refers to a group of dbt Labs partners who have full end-to-end integrations with the dbt Semantic Layer and leverage the dbt Server. + +- **Metrics Ready partners** - Metrics Ready integrations facilitate building, discovery, and collaborating on dbt metric definitions. + +For information on the different integration partners, their documentation, and more, review the [dbt Semantic Layer integrations](https://www.getdbt.com/product/semantic-layer-integrations) page. + + + + +## Related docs + +- [dbt Semantic Layer](/docs/use-dbt-semantic-layer/dbt-semantic-layer) to learn more about the dbt Semantic Layer. +- [Product architecture](/docs/use-dbt-semantic-layer/dbt-semantic-layer#product-architecture) page for more information on plan availability. 
+- [Public Preview information](/docs/use-dbt-semantic-layer/quickstart-semantic-layer#public-preview) to understand what Public Preview for the dbt Semantic Layer means. +- [How to design and structure dbt metrics: Recommendations for getting started](https://docs.getdbt.com/blog/how-to-design-and-structure-metrics) to understand best practices for designing and structuring metrics in your dbt project. diff --git a/website/docs/docs/use-dbt-semantic-layer/dbt-semantic-layer.md b/website/docs/docs/use-dbt-semantic-layer/dbt-semantic-layer.md new file mode 100644 index 00000000000..bca1ff3549f --- /dev/null +++ b/website/docs/docs/use-dbt-semantic-layer/dbt-semantic-layer.md @@ -0,0 +1,162 @@ +--- +title: "dbt Semantic Layer" +id: dbt-semantic-layer +description: "Introducing the dbt Semantic Layer" +sidebar_label: "dbt Semantic Layer" +--- + + + +The dbt Semantic Layer allows data teams to centrally define essential business metrics like `revenue`, `customer`, and `churn` in the modeling layer (your dbt project) for consistent self-service within downstream data tools like BI and metadata management solutions. + +The result? You have less duplicative coding for data teams and more consistency for data consumers. + +The dbt Semantic Layer has four main parts: + +- Define your metrics in version-controlled dbt project code +- Import your metric definitions via the [Metadata API](/docs/dbt-cloud-apis/metadata-api) +- Query your metric data via the dbt Proxy Server +- Explore and analyze dbt metrics in downstream tools + + + + +### What makes the dbt Semantic Layer different? + +The dbt Semantic Layer reduces code duplication and inconsistency regarding your business metrics. By moving metric definitions out of the BI layer and into the modeling layer, data teams can feel confident that different business units are working from the same metric definitions, regardless of their tool of choice. If a metric definition changes in dbt, it’s refreshed everywhere it’s invoked and creates consistency across all applications. + + +## Prerequisites +To use the dbt Semantic Layer, you’ll need to meet the following: + + + +- Have a multi-tenant dbt Cloud account, hosted in North America
+- Have both your production and development environments running dbt version 1.3 or higher
+- Use Snowflake data platform
+- Install the dbt metrics package version ">=1.3.0", "<1.4.0" in your dbt project (a `packages.yml` example follows this list)
+- Set up the Metadata API in the integrated tool to import metric definitions + * Developer accounts will be able to query the Proxy Server using SQL, but won't be able to browse pre-populated dbt metrics in external tools, which requires access to the Metadata API
+- Recommended - Review the dbt metrics page and Understanding the components of the dbt Semantic Layer blog
+ +
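+As a reference for the package prerequisite above, a matching `packages.yml` entry looks like this (the same pinned range the quickstart in these docs installs):
+
+```yml
+packages:
+  - package: dbt-labs/metrics
+    version: [">=1.3.0", "<1.4.0"]
+```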
+ + + +- Have a multi-tenant dbt Cloud account, hosted in North America
+- Have both your production and development environments running dbt version 1.2 (latest)
+- Use Snowflake data platform
+- Install the dbt metrics package version ">=0.3.0", "<0.4.0" in your dbt project
+- Set up the Metadata API in the integrated tool to import metric definitions + * Developer accounts will be able to query the Proxy Server using SQL, but won't be able to browse pre-populated dbt metrics in external tools, which requires access to the Metadata API
+- Recommended - Review the dbt metrics page and Understanding the components of the dbt Semantic Layer blog
+ +
+
+
+## Public Preview
+
+The dbt Semantic Layer is currently available for Public Preview, which means:
+
+**Who?** The dbt Semantic Layer is open to all dbt Cloud tiers (Developer, Team, and Enterprise) during Public Preview. Review [Product architecture](/docs/use-dbt-semantic-layer/dbt-semantic-layer#product-architecture) for more info on plan availability.
+
+- Team and Enterprise accounts will be able to set up the Semantic Layer and [Metadata API](/docs/dbt-cloud-apis/metadata-api) in the integrated
+partner tool to import metric definitions.
+- Developer accounts will be able to query the Proxy Server using SQL, but will not be able to browse dbt metrics in external tools, which requires access to the Metadata API.
+
+**What?** Public Previews provide early access to new features. The Semantic Layer is stable and you can use it for production deployments, but there may still be some planned additions and modifications to product behaviors before moving to General Availability. We may also introduce new functionality that is not backwards compatible. dbt Labs provides support, and relevant service level objectives (SLOs) apply. We will introduce pricing for the dbt Semantic Layer alongside the General Availability (GA) release (future GA date to be announced).
+
+**When?** Public Preview will end once the dbt Semantic Layer is available for GA. After GA, the dbt Semantic Layer will only be available to dbt Cloud **Team** and **Enterprise** plans.
+
+**Where?** Public Preview is enabled at the account level, so you don’t need to worry about enabling it per user.
+
+## Product architecture
+
+The dbt Semantic Layer product architecture includes four primary components:
+
+| Components | Information | Developer plans | Team plans | Enterprise plans | License |
+| --- | --- | :---: | :---: | :---: | --- |
+| **[dbt metrics](/docs/build/metrics)** | Allows you to define metrics in dbt Core. | ✅ | ✅ | ✅ | Open source, Core |
+| **[dbt Server](https://github.com/dbt-labs/dbt-server)**| A persisted HTTP server that wraps dbt core to handle RESTful API requests for dbt operations. | ✅ | ✅ | ✅ | BSL |
+| **SQL Proxy** | Reverse-proxy that accepts dbt-SQL (SQL plus Jinja, as used in models and metrics, including macros), compiles the query into pure SQL, and executes the query against the data platform. | ✅

_* Available during Public Preview only_ | ✅ | ✅ | Proprietary, Cloud (Team & Enterprise) |
+| **[Metadata API](/docs/dbt-cloud-apis/metadata-api)** | Accesses metric definitions primarily via integrations and is the source of truth for objects defined in dbt projects (like models, macros, sources, metrics). The Metadata API is updated at the end of every dbt Cloud run. | ❌ | ✅ | ✅ | Proprietary, Cloud (Team & Enterprise) |
+
+
+dbt Semantic Layer integrations will:
+
+- Leverage the Metadata API to fetch a list of objects and their attributes, like metrics
+- Generate a dbt-SQL statement
+- Query the SQL Proxy to evaluate the results of this statement
+
+
+## Manage metrics
+
+:::info 📌
+
+New to dbt or metrics? Check out our [Getting Started guide](/docs/get-started/getting-started/overview) to build your first dbt project! If you'd like to define your first metrics, try our [Jaffle Shop](https://github.com/dbt-labs/jaffle_shop_metrics) example project.
+
+:::
+
+If you're not sure whether to define a metric in dbt or not, ask yourself the following:
+
+> *Is this something our teams consistently need to report on?*
+
+An important business metric should be:
+
+- Well-defined (the definition is agreed upon throughout the entire organization)
+- Time-bound (able to be compared across time)
+
+A great example of this is **revenue** — it can be aggregated on multiple levels (weekly, monthly, etc.) and is key for the broader business to understand.
+
+- ✅ `Monthly recurring revenue` or `Weekly active users` or `Average order value`
+- ❌ `1-off experimental metric`
+
+
+### Design and define metrics
+
+**Design metrics**
+To read about best practices on structuring and organizing your metrics, review our [How to design and structure dbt metrics: Recommendations for getting started](https://docs.getdbt.com/blog/how-to-design-and-structure-metrics) blog post first.
+
+**Define metrics**
+You can define your metrics in `.yml` files nested under a `metrics` key. To design or define your own metrics in your dbt project, review the following documents:
+
+- [How to design and structure dbt metrics: Recommendations for getting started](https://docs.getdbt.com/blog/how-to-design-and-structure-metrics) blog to understand best practices for designing and structuring metrics in your dbt project
+- [dbt metrics](/docs/build/metrics) for in-depth detail on attributes and filters, how to define and query your metrics, and the [dbt-metrics package](https://github.com/dbt-labs/dbt_metrics)
+- [dbt Semantic Layer quickstart](/docs/use-dbt-semantic-layer/quickstart-semantic-layer) to get started
+- [Understanding the components of the dbt Semantic Layer](https://docs.getdbt.com/blog/understanding-the-components-of-the-dbt-semantic-layer) blog post to see further examples
+
+Review our helpful metrics video below, which explains what metrics are, why they're important, and how you can get started:
+
+
+## Related questions
+
+ How are you storing my data? +
+
The dbt Semantic Layer does not store, cache, or log your data. On each query to the Semantic Layer, the resulting data passes through dbt Cloud servers where it is never stored, cached, or logged. The data from your data platform gets routed through dbt Cloud servers to your connecting data tool.
+
+
+
+ Is the dbt Semantic Layer open source? +
+
Some components of the dbt Semantic Layer are open source like dbt-core, the dbt_metrics package, and the BSL licensed dbt-server. The dbt Proxy Server (what is actually compiling the dbt code) and the Metadata API are not open source.



+ +During Public Preview, the dbt Semantic Layer is open to all dbt Cloud tiers (Developer, Team, and Enterprise).



+ +
    +
  • dbt Core users can define metrics in their dbt Core projects and calculate them using macros from the metrics package. To use the dbt Semantic Layer integrations, users will need to have a dbt Cloud account.


  • +
  • Developer accounts will be able to query the Proxy Server using SQL, but will not be able to browse pre-populated dbt metrics in external tools, which requires access to the Metadata API.


  • +
  • Team and Enterprise accounts will be able to set up the Semantic Layer and Metadata API in the integrated partner tool to import metric definition.
  • +
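+
+For instance, a dbt Core user can calculate a defined metric with the metrics package macro. A minimal sketch (the `expenses` metric comes from the example defined later in these docs; the grain and dimension values are illustrative):
+
+```sql
+select *
+from {{ metrics.calculate(
+    metric('expenses'),
+    grain='month',
+    dimensions=['customer_status']
+) }}
+```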
+
+
+ Is there a dbt Semantic Layer discussion hub? +
+
Yes, absolutely! Join the [dbt Slack community](https://community.getdbt.com/) and the #dbt-cloud-semantic-layer Slack channel for all things related to the dbt Semantic Layer.
+
+
+

diff --git a/website/docs/docs/use-dbt-semantic-layer/quickstart-semantic-layer.md b/website/docs/docs/use-dbt-semantic-layer/quickstart-semantic-layer.md
new file mode 100644
index 00000000000..2ed60e32ded
--- /dev/null
+++ b/website/docs/docs/use-dbt-semantic-layer/quickstart-semantic-layer.md
@@ -0,0 +1,321 @@
+---
+title: "Quickstart"
+id: quickstart-semantic-layer
+description: "Define metrics and set up the dbt Semantic Layer"
+sidebar_label: "Quickstart"
+---
+
+# dbt Semantic Layer quickstart
+
+
+## Public Preview
+
+We're excited to announce the dbt Semantic Layer is currently available for Public Preview, which means:
+
+**Who?** The dbt Semantic Layer is open to all dbt Cloud tiers (Developer, Team, and Enterprise) during Public Preview. Review [Product architecture](/docs/use-dbt-semantic-layer/dbt-semantic-layer#product-architecture) for more info on plan availability.
+
+- Team and Enterprise accounts will be able to set up the Semantic Layer and [Metadata API](/docs/dbt-cloud-apis/metadata-api) in the integrated
+partner tool to import metric definitions.
+- Developer accounts will be able to query the Proxy Server using SQL, but will not be able to browse dbt metrics in external tools, which requires access to the Metadata API.
+
+**What?** Public Previews provide early access to new features. The Semantic Layer is stable and you can use it for production deployments, but there may still be some planned additions and modifications to product behaviors before moving to General Availability. We may also introduce new functionality that is not backwards compatible. dbt Labs provides support, and relevant service level objectives (SLOs) apply. We will introduce pricing for the dbt Semantic Layer alongside the General Availability (GA) release (future GA date to be announced).
+
+**When?** Public Preview will end once the dbt Semantic Layer is available for GA. After GA, the dbt Semantic Layer will only be available to dbt Cloud **Team** and **Enterprise** plans.
+
+**Where?** Public Preview is enabled at the account level, so you don’t need to worry about enabling it per user.
+
+
+## Introduction
+
+To try out the features of the dbt Semantic Layer, you first need to have a dbt project set up. This quickstart guide lays out the following steps and recommends a workflow that demonstrates some of the Semantic Layer's essential features:
+
+- Install the dbt metrics package
+- Define metrics
+- Query and run metrics
+- Configure the dbt Semantic Layer
+
+## Prerequisites
+To use the dbt Semantic Layer, you’ll need to meet the following:
+
+
+
+- Have a multi-tenant dbt Cloud account, hosted in North America
+- Have both your production and development environments running dbt version 1.3 or higher
+- Use Snowflake data platform
+- Install the dbt metrics package version ">=1.3.0", "<1.4.0" in your dbt project
+- Set up the Metadata API in the integrated tool to import metric definitions + * Developer accounts will be able to query the Proxy Server using SQL, but won't be able to browse pre-populated dbt metrics in external tools, which requires access to the Metadata API
+- Recommended - Review the dbt metrics page and Understanding the components of the dbt Semantic Layer blog
+ +
+ + + +- Have a multi-tenant dbt Cloud account, hosted in North America
+- Have both your production and development environments running dbt version 1.2 (latest)
+- Use Snowflake data platform
+- Install the dbt metrics package version ">=0.3.0", "<0.4.0" in your dbt project
+- Set up the Metadata API in the integrated tool to import metric definitions + * Developer accounts will be able to query the Proxy Server using SQL, but won't be able to browse pre-populated dbt metrics in external tools, which requires access to the Metadata API
+- Recommended - Review the dbt metrics page and Understanding the components of the dbt Semantic Layer blog
+ +
+
+
+
+
+:::info 📌
+
+New to dbt or metrics? Check out our [Getting Started guide](/docs/get-started/getting-started/overview) to build your first dbt project! If you'd like to define your first metrics, try our [Jaffle Shop](https://github.com/dbt-labs/jaffle_shop_metrics) example project.
+
+:::
+
+## Installing dbt metrics package
+The dbt Semantic Layer supports the calculation of metrics by using the [dbt metrics package](https://hub.getdbt.com/dbt-labs/metrics/latest/). You can install the dbt metrics package in your dbt project by copying the below code blocks.
+
+```yml
+packages:
+  - package: dbt-labs/metrics
+    version: [">=1.3.0", "<1.4.0"]
+```
+
+```yml
+packages:
+  - package: dbt-labs/metrics
+    version: [">=0.3.0", "<0.4.0"]
+```
+
+```yml
+packages:
+  - package: dbt-labs/metrics
+    version: [">=0.2.0", "<0.3.0"]
+```
+
+1. Paste the dbt metrics package code that matches your dbt version in your `packages.yml` file.
+2. Run the [`dbt deps` command](/reference/commands/deps) to install the package.
+3. If you see a success message, you have installed the dbt metrics package!
+4. If you run into errors during the `dbt deps` run, review the system logs for more information on how to resolve them. Make sure you use a dbt metrics package that’s compatible with your dbt environment version.
+
+## Design and define metrics
+
+Review our helpful metrics video below, which explains what metrics are, why they're important, and how you can get started:
+
+### Design metrics
+
+To read about best practices on structuring and organizing your metrics, review our [How to design and structure dbt metrics: Recommendations for getting started](https://docs.getdbt.com/blog/how-to-design-and-structure-metrics) blog post first.
+
+### Define metrics
+Now that you've organized your metrics folder and files, you can define your metrics in `.yml` files nested under a `metrics` key.
+
+1. Add the metric definitions found in the [Jaffle Shop](https://github.com/dbt-labs/jaffle_shop_metrics) example to your dbt project. For example, to add an expenses metric, you can define the following metric directly in your metrics folder:
+
+```yml
+version: 2
+
+metrics:
+  - name: expenses
+    label: Expenses
+    model: ref('orders')
+    description: "The total expenses of our jaffle business"
+
+    calculation_method: sum
+    expression: amount / 4
+
+    timestamp: order_date
+    time_grains: [day, week, month, year]
+
+    dimensions:
+      - customer_status
+      - had_credit_card_payment
+      - had_coupon_payment
+      - had_bank_transfer_payment
+      - had_gift_card_payment
+
+    filters:
+      - field: status
+        operator: '='
+        value: "'completed'"
+```
+
+```yml
+version: 2
+
+metrics:
+  - name: expenses
+    label: Expenses
+    model: ref('orders')
+    description: "The total expenses of our jaffle business"
+
+    type: sum
+    sql: amount / 4
+
+    timestamp: order_date
+    time_grains: [day, week, month, year]
+
+    dimensions:
+      - customer_status
+      - had_credit_card_payment
+      - had_coupon_payment
+      - had_bank_transfer_payment
+      - had_gift_card_payment
+
+    filters:
+      - field: status
+        operator: '='
+        value: "'completed'"
+```
+
+2. Click **Save** and then **Compile** the code.
+3. Commit and merge the code changes that contain the metric definitions.
+4. If you'd like to further design and define your own metrics, review the following documentation:
+
+    - [dbt metrics](/docs/build/metrics) provides in-depth detail on attributes, properties, filters, and how to define and query metrics.
+
+    - Review the [How to design and structure dbt metrics: Recommendations for getting started](https://docs.getdbt.com/blog/how-to-design-and-structure-metrics) blog to understand best practices for designing and structuring metrics in your dbt project.
+
+## Develop and query metrics
+
+You can dynamically develop and query metrics directly in dbt and verify their accuracy _before_ running a job in the deployment environment by using the `metrics.calculate` and `metrics.develop` macros.
+
+To understand when and how to use these macros, review [dbt metrics](/docs/build/metrics), and make sure you install the [dbt_metrics package](https://github.com/dbt-labs/dbt_metrics) before using them.
+
+:::info 📌
+
+**Note:** You will need access to dbt Cloud and the dbt Semantic Layer from your integrated partner tool of choice.
+
+:::
+
+## Run your production job
+
+Once you’ve defined metrics in your dbt project, you can perform a job run in your deployment environment to materialize your metrics. At this moment, only the deployment environment is supported for the dbt Semantic Layer.
+
+1. Go to **Deploy** in the navigation and select **Jobs** to re-run the job with the most recent code in the deployment environment.
+2. Your metric should appear as a red node in the dbt Cloud IDE and dbt directed acyclic graphs (DAG).
+
+**What’s happening internally?**
+
+- Merging the code into your main branch allows dbt Cloud to pull those changes and build the definition in the manifest produced by the run.
+- Re-running the job in the deployment environment helps materialize the models, which the metrics depend on, in the data platform. It also makes sure that the manifest is up to date.
+- The dbt Metadata API then pulls in the most recent manifest and allows your integration to extract metadata from it.
+
+## Set up dbt Semantic Layer
+
+
+## Troubleshooting
+
+If you're encountering some issues when defining your metrics or setting up the dbt Semantic Layer, check out a list of answers to some of the questions or problems you may be experiencing.
+
+ How are you storing my data? +
+
The dbt Semantic Layer does not store, cache, or log your data. On each query to the Semantic Layer, the resulting data passes through dbt Cloud servers where it is never stored, cached, or logged. The data from your data platform gets routed through dbt Cloud servers to your connecting data tool.
+
+
+
+ Is the dbt Semantic Layer open source? +
+
Some components of the dbt Semantic Layer are open source, like dbt-core, the dbt_metrics package, and the BSL-licensed dbt-server. The dbt Proxy Server (which actually compiles the dbt code) and the Metadata API are not open source.
+
+During Public Preview, the dbt Semantic Layer is open to all dbt Cloud tiers (Developer, Team, and Enterprise).
+
+  - dbt Core users can define metrics in their dbt Core projects and calculate them using macros from the metrics package. To use the dbt Semantic Layer integrations, you will need to have a dbt Cloud account.
+  - Developer accounts will be able to query the Proxy Server using SQL, but will not be able to browse pre-populated dbt metrics in external tools, which requires access to the Metadata API.
+  - Team and Enterprise accounts will be able to set up the Semantic Layer and Metadata API in the integrated partner tool to import metric definitions.
+
+
+
+
+ The dbt_metrics_calendar_table does not exist or is not authorized? +
+
All metrics queries are dependent on either the dbt_metrics_calendar_table or a custom calendar set in the user's dbt_project.yml. If you have not created this model in the database, these queries will fail and you'll most likely see the following error message:
+
+Object DATABASE.SCHEMA.DBT_METRICS_DEFAULT_CALENDAR does not exist or not authorized.
+
+Fix:
+
+  - If developing locally, run dbt run --select dbt_metrics_default_calendar
+  - If you are using this in production, make sure that you perform a full dbt build or dbt run. If you run only specific selections in your production job, you will not create this required model.
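+
+For example, the two fixes above map to the following commands (the model name is the packaged default calendar from the error message):
+
+```shell
+# local development: build just the default calendar model
+dbt run --select dbt_metrics_default_calendar
+
+# production: a full build creates the calendar model along with everything else
+dbt build
+```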
+
+
+
+
+ Ephemeral Models - Object does not exist or is not authorized +
+
Metrics cannot be defined on ephemeral models. Because we reference the underlying table in the query that generates the metric, the table/view needs to exist in the database. If your table/view does not exist in your database, you might see this error message:
+
+Object 'DATABASE.SCHEMA.METRIC_MODEL_TABLE' does not exist or not authorized.
+
+Fix:
+
+  - Materialize the model that the metric is built on as a table, view, or incremental model.
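+
+As a minimal sketch, persisting the model a metric is built on can be done with a config block at the top of the model file (the upstream `stg_orders` reference here is illustrative):
+
+```sql
+{{ config(materialized='table') }}
+
+-- the metric's model now exists as a table in the database
+select * from {{ ref('stg_orders') }}
+```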
+
+
+
+ +
+ Mismatched Versions - metric type is ‘’ +
+
If you’re running dbt_metrics ≥v0.3.2 but have dbt-core version ≥1.3.0, you’ll likely see these error messages: + +
    +
  - Error message 1: The metric NAME also references ... but its type is ''. Only metrics of type expression can reference other metrics.
+  - Error message 2: Unknown aggregation style: > in macro default__gen_primary_metric_aggregate (macros/sql_gen/gen_primary_metric_aggregate.sql)
+
+The reason you're experiencing this error is that we changed the type property of the metric spec to calculation_method in dbt-core v1.3.0. The package reflects that new name, so it isn’t finding any type when we try to run outdated code on it.

+ +Fix: + + + +
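+The fix is to align the dbt_metrics package version with your dbt-core version. A sketch, assuming a dbt-core 1.3.x project and the version pairing from the prerequisites above:
+
+```yml
+packages:
+  - package: dbt-labs/metrics
+    version: [">=1.3.0", "<1.4.0"]
+```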
+
+
+

+ + +## Next steps + +Are you ready to define your own metrics and bring consistency to data consumers? Review the following documents to understand how to structure, define, and query metrics, and set up the dbt Semantic Layer: + +- [How to design and structure dbt metrics: Recommendations for getting started](https://docs.getdbt.com/blog/how-to-design-and-structure-metrics) to understand best practices for designing and structuring metrics in your dbt project +- [dbt metrics](/docs/build/metrics) for in-depth detail on attributes, properties, filters, and how to define and query metrics +- [dbt Semantic Layer](/docs/use-dbt-semantic-layer/dbt-semantic-layer) to learn about the dbt Semantic Layer +- [Understanding the components of the dbt Semantic Layer](https://docs.getdbt.com/blog/understanding-the-components-of-the-dbt-semantic-layer) blog post to see further examples +- [Integrated partner tools](https://www.getdbt.com/product/semantic-layer-integrations) for info on the different integration partners and their documentation +- [dbt Server repo](https://github.com/dbt-labs/dbt-server), which is a persisted HTTP server that wraps dbt core to handle RESTful API requests for dbt operations. diff --git a/website/docs/docs/use-dbt-semantic-layer/set-dbt-semantic-layer.md b/website/docs/docs/use-dbt-semantic-layer/set-dbt-semantic-layer.md new file mode 100644 index 00000000000..90fabea0bca --- /dev/null +++ b/website/docs/docs/use-dbt-semantic-layer/set-dbt-semantic-layer.md @@ -0,0 +1,54 @@ +--- +title: "Set up the dbt Semantic Layer" +id: setup-dbt-semantic-layer +description: "You can set up the dbt Semantic Layer in dbt Cloud." +sidebar_label: "Set up the dbt Semantic Layer" +--- + +With the dbt Semantic Layer, you'll be able to centrally define business metrics, reduce code duplication and inconsistency, create self-service in downstream tools, and more. Configure the dbt Semantic Layer in dbt Cloud to connect with your integrated partner tool. + +## Prerequisites + +Before you set up the dbt Semantic Layer, make sure you meet the following: + + + +- Have a multi-tenant dbt Cloud account, hosted in North America
+- Have both your production and development environments running dbt version 1.3 or higher
+- Use Snowflake data platform
+- Install the dbt metrics package version ">=1.3.0", "<1.4.0" in your dbt project
+- Set up the Metadata API in the integrated tool to import metric definitions + * Developer accounts will be able to query the Proxy Server using SQL, but will not be able to browse pre-populated dbt metrics in external tools, which requires access to the Metadata API
+- Recommended - Review the dbt metrics page and Understanding the components of the dbt Semantic Layer blog
+ +
+ + + +- Have a multi-tenant dbt Cloud account, hosted in North America
+- Have both your production and development environments running dbt version 1.2 (latest)
+- Use Snowflake data platform
+- Install the dbt metrics package version ">=0.3.0", "<0.4.0" in your dbt project
+- Set up the Metadata API in the integrated tool to import metric definitions + * Developer accounts will be able to query the Proxy Server using SQL, but will not be able to browse pre-populated dbt metrics in external tools, which requires access to the Metadata API
+- Recommended - Review the dbt metrics page and Understanding the components of the dbt Semantic Layer blog
+ +
+ + + + + +## Set up dbt Semantic Layer + + + +
+ + +## Related docs + +- [Integrated partner tools](https://www.getdbt.com/product/semantic-layer-integrations) for info on the different integration partners and their documentation +- [Product architecture](/docs/use-dbt-semantic-layer/dbt-semantic-layer#product-architecture) page for more information on plan availability +- [dbt metrics](/docs/build/metrics) for in-depth detail on attributes, properties, filters, and how to define and query metrics +- [dbt Server repo](https://github.com/dbt-labs/dbt-server), which is a persisted HTTP server that wraps dbt core to handle RESTful API requests for dbt operations diff --git a/website/docs/faqs/Accounts/change-billing.md b/website/docs/faqs/Accounts/change-billing.md new file mode 100644 index 00000000000..65e47cc9c4a --- /dev/null +++ b/website/docs/faqs/Accounts/change-billing.md @@ -0,0 +1,11 @@ +--- +title: How can I update my billing information? +description: "Instructions on how to update your billing info" +sidebar_label: 'How to update billing' +id: change-billing +--- + + +If you want to change your account's credit card details, select the gear menu in the upper right corner of dbt Cloud. Go to Account Settings → Billing → Payment Information. In the upper right corner of Payment Information, click **Edit** to enter the new credit card details. Only the _account owner_ can make this change. + +To change your billing name or location address, send our Support team a message at support@getdbt.com with the newly updated information, and we can make that change for you! diff --git a/website/docs/faqs/configurable-snapshot-path.md b/website/docs/faqs/Accounts/configurable-snapshot-path.md similarity index 76% rename from website/docs/faqs/configurable-snapshot-path.md rename to website/docs/faqs/Accounts/configurable-snapshot-path.md index 66f084ee775..45e51f20dbf 100644 --- a/website/docs/faqs/configurable-snapshot-path.md +++ b/website/docs/faqs/Accounts/configurable-snapshot-path.md @@ -1,5 +1,8 @@ --- title: Can I store my snapshots in a directory other than the `snapshot` directory in my project? +description: "You can see how to change snapshots directory in your project" +sidebar_label: 'Store snapshot in other directory' +id: configurable-snapshot-path --- By default, dbt expects your snapshot files to be located in the `snapshots` subdirectory of your project. diff --git a/website/docs/faqs/Accounts/dbt-specific-jinja.md b/website/docs/faqs/Accounts/dbt-specific-jinja.md new file mode 100644 index 00000000000..e2623491007 --- /dev/null +++ b/website/docs/faqs/Accounts/dbt-specific-jinja.md @@ -0,0 +1,8 @@ +--- +title: What parts of Jinja are dbt-specific? +description: "You can check which Jinja functions are dbt-specific" +sidebar_label: 'dbt-specific Jinja' +id: dbt-specific-jinja +--- + +There are certain expressions that are specific to dbt — these are documented in the [Jinja function reference](/reference/dbt-jinja-functions) section of these docs. Further, docs blocks, snapshots, and materializations are custom Jinja _blocks_ that exist only in dbt. diff --git a/website/docs/faqs/Accounts/git-account-in-use.md b/website/docs/faqs/Accounts/git-account-in-use.md new file mode 100644 index 00000000000..a84577e6ede --- /dev/null +++ b/website/docs/faqs/Accounts/git-account-in-use.md @@ -0,0 +1,15 @@ +--- +title: Why am I getting an "account in use" error? 
+description: "What to do when you receive a Git account in use error" +sidebar_label: 'Git account in use' +id: git-account-in-use +--- + +If you're receiving an 'Account in use' error when trying to integrate GitHub in your Profile page, this is because the Git integration is a 1-to-1 integration, so you can only have your Git account linked to one dbt Cloud user account. + +Here are some steps to take to get you unstuck: + +* Log in to the dbt Cloud account integrated with your Git account. Go to your user profile and click on Integrations to remove the link. + +If you don't remember which dbt Cloud account is integrated, please email dbt Support at support@getdbt.com and we'll do our best to disassociate the integration for you. + diff --git a/website/docs/faqs/Accounts/payment-accepted.md b/website/docs/faqs/Accounts/payment-accepted.md new file mode 100644 index 00000000000..2e26063c684 --- /dev/null +++ b/website/docs/faqs/Accounts/payment-accepted.md @@ -0,0 +1,10 @@ +--- +title: Can I pay via invoice? +description: "What payments types are accepted" +sidebar_label: 'Can I pay invoice' +id: payment-accepted +--- + +Presently for Team plans, self-service dbt Cloud payments must be made via credit card and by default, they will be billed monthly based on the number of active developer seats. + +We don't have any plans to do invoicing for self-service teams in the near future, but we *do* currently support invoices for companies on the **dbt Cloud Enterprise plan.** Feel free to [contact us](https://www.getdbt.com/contact) to build your Enterprise pricing. diff --git a/website/docs/faqs/Accounts/slack.md b/website/docs/faqs/Accounts/slack.md new file mode 100644 index 00000000000..01001141e2e --- /dev/null +++ b/website/docs/faqs/Accounts/slack.md @@ -0,0 +1,8 @@ +--- +title: How do I set up Slack notifications? +description: "Instructions on how to set up slack notifications" +sidebar_label: 'How to set up Slack' +id: slack +--- + + diff --git a/website/docs/faqs/Accounts/transfer-account.md b/website/docs/faqs/Accounts/transfer-account.md new file mode 100644 index 00000000000..f3bba49bd7a --- /dev/null +++ b/website/docs/faqs/Accounts/transfer-account.md @@ -0,0 +1,21 @@ +--- +title: How do I transfer account ownership to another user? +description: "Instructions on how to transfer your dbt Cloud user account to another user" +sidebar_label: 'How to transfer dbt Cloud account?' +id: transfer-account + +--- + +You can transfer your dbt Cloud [access control](/docs/collaborate/manage-access/about-access) to another user by following the steps below, depending on your dbt Cloud account plan: + +| Account plan| Steps | +| ------ | ---------- | +| **Developer** | You can transfer ownership by changing the email directly on your dbt Cloud [profile page](https://cloud.getdbt.com/#/profile/). | +| **Team** | Existing account admins with account access can add users to, or remove users from the owner group. | +| **Enterprise** | Account admins can add users to, or remove users from a group with Account Admin permissions. | +| **If all account owners left the company** | If the account owner has left your organization, you will need to work with _your_ IT department to have incoming emails forwarded to the new account owner. Once your IT department has redirected the emails, you can request to reset the user password. Once you log in - you can change the email on the [Profile page](https://cloud.getdbt.com/#/profile/). 
| + +When you make any account owner and email changes: + +- The new email address _must_ be verified through our email verification process. +- You can update any billing email address or [Notifications Settings](/docs/deploy/job-notifications) to reflect the new account owner changes, if applicable. diff --git a/website/docs/faqs/install-pip-best-practices.md b/website/docs/faqs/Core/install-pip-best-practices.md similarity index 82% rename from website/docs/faqs/install-pip-best-practices.md rename to website/docs/faqs/Core/install-pip-best-practices.md index edfcdc54f53..e36d58296ec 100644 --- a/website/docs/faqs/install-pip-best-practices.md +++ b/website/docs/faqs/Core/install-pip-best-practices.md @@ -1,6 +1,8 @@ --- title: "What are the best practices for installing dbt Core with pip?" -description: "" +description: "Instructions on how to install dbt Core with pip" +sidebar_label: 'Installing dbt Core with pip' +id: install-pip-best-practices.md --- Managing Python local environments can be challenging! You can use these best practices to improve the dbt Core installation with pip. @@ -12,8 +14,8 @@ We recommend using [virtual environments](https://docs.python-guide.org/dev/virt ```shell python3 -m venv dbt-env # create the environment -source dbt-env/bin/activate # activate the environment - +source dbt-env/bin/activate # activate the environment for Mac and Linux +dbt-env\Scripts\activate # activate the environment for Windows ``` If you install `dbt` in a virtual environment, you need to reactivate that same virtual environment each time you create a shell window or session. diff --git a/website/docs/faqs/install-pip-os-prereqs.md b/website/docs/faqs/Core/install-pip-os-prereqs.md similarity index 86% rename from website/docs/faqs/install-pip-os-prereqs.md rename to website/docs/faqs/Core/install-pip-os-prereqs.md index da8c23b93a8..8124eaaa215 100644 --- a/website/docs/faqs/install-pip-os-prereqs.md +++ b/website/docs/faqs/Core/install-pip-os-prereqs.md @@ -1,10 +1,12 @@ --- title: "Does my operating system have prerequisites?" -Description: "You can check whether your operating system has prerequisites for installing dbt Core." +description: "You can check whether your operating system has prerequisites for installing dbt Core." +sidebar_label: 'dbt Core system prerequisites' +id: install-pip-os-prereqs.md --- -Your operating system may require pre-installation setup before installing dbt Core with pip. After downloading and installing any dependencies specific to your development environment, you can proceed with the [pip installation of dbt Core](/docs/dbt-cli/install/pip). +Your operating system may require pre-installation setup before installing dbt Core with pip. After downloading and installing any dependencies specific to your development environment, you can proceed with the [pip installation of dbt Core](/docs/get-started/pip-install). ### CentOS @@ -55,4 +57,4 @@ Windows requires Python and git to successfully install and run dbt Core. Install [Git for Windows](https://git-scm.com/downloads) and [Python version 3.7 or higher for Windows](https://www.python.org/downloads/windows/). 
-For further questions, please see the [Python compatibility FAQ](/docs/faqs/install-python-compatibility) +For further questions, please see the [Python compatibility FAQ](/docs/faqs/Core/install-python-compatibility) diff --git a/website/docs/faqs/install-python-compatibility.md b/website/docs/faqs/Core/install-python-compatibility.md similarity index 93% rename from website/docs/faqs/install-python-compatibility.md rename to website/docs/faqs/Core/install-python-compatibility.md index 7f694e01586..9e20cbb0f85 100644 --- a/website/docs/faqs/install-python-compatibility.md +++ b/website/docs/faqs/Core/install-python-compatibility.md @@ -1,6 +1,8 @@ --- title: What version of Python can I use? description: "Python versions 3.7 and newer can be used with dbt Core" +sidebar_label: 'Python version' +id: install-python-compatibility --- diff --git a/website/docs/faqs/document-all-columns.md b/website/docs/faqs/Docs/document-all-columns.md similarity index 71% rename from website/docs/faqs/document-all-columns.md rename to website/docs/faqs/Docs/document-all-columns.md index 19fd5c4f96a..0de7881bfdd 100644 --- a/website/docs/faqs/document-all-columns.md +++ b/website/docs/faqs/Docs/document-all-columns.md @@ -1,5 +1,8 @@ --- title: Do I need to add a yaml entry for column for it to appear in the docs site? +description: "All columns appear in your docs site" +sidebar_label: 'Types of columns included in doc site' +id: document-all-columns --- Fortunately, no! diff --git a/website/docs/faqs/document-other-resources.md b/website/docs/faqs/Docs/document-other-resources.md similarity index 65% rename from website/docs/faqs/document-other-resources.md rename to website/docs/faqs/Docs/document-other-resources.md index 58dda592d36..b5d612fbcfc 100644 --- a/website/docs/faqs/document-other-resources.md +++ b/website/docs/faqs/Docs/document-other-resources.md @@ -1,5 +1,8 @@ --- title: Can I document things other than models, like sources, seeds, and snapshots? +description: "Using the description key to document resources" +sidebar_label: 'Document other resources' +id: document-other-resources --- Yes! You can document almost everything in your project using the `description:` key. Check out the reference docs on [descriptions](description) for more info! diff --git a/website/docs/faqs/documenting-macros.md b/website/docs/faqs/Docs/documenting-macros.md similarity index 82% rename from website/docs/faqs/documenting-macros.md rename to website/docs/faqs/Docs/documenting-macros.md index 7db45b3ef74..71211f2c100 100644 --- a/website/docs/faqs/documenting-macros.md +++ b/website/docs/faqs/Docs/documenting-macros.md @@ -1,12 +1,15 @@ --- title: How do I document macros? +description: "You can use a schema file to document macros" +sidebar_label: 'Document macros' +id: documenting-macros --- The `macros:` key is new in 0.16.0. To document macros, use a [schema file](macro-properties) and nest the configurations under a `macros:` key -## Example: +## Example diff --git a/website/docs/faqs/Docs/long-descriptions.md b/website/docs/faqs/Docs/long-descriptions.md new file mode 100644 index 00000000000..f353050468e --- /dev/null +++ b/website/docs/faqs/Docs/long-descriptions.md @@ -0,0 +1,23 @@ +--- +title: How do I write long-form explanations in my descriptions? +description: "Write long descriptions in your documentation" +sidebar_label: 'Write long descriptions' +id: long-descriptions +--- +If you need more than a sentence to explain a model, you can: + +1. 
Split your description over multiple lines ([yaml docs](https://yaml-multiline.info/)), like so: + + ```yml + version: 2 + + models: + - name: customers + description: > + Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod + tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, + quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo + consequat. + ``` + +2. Use a [docs block](/documentation#using-docs-blocks) to write the description in a Markdown file. diff --git a/website/docs/faqs/Docs/sharing-documentation.md b/website/docs/faqs/Docs/sharing-documentation.md new file mode 100644 index 00000000000..4c6e0e84f77 --- /dev/null +++ b/website/docs/faqs/Docs/sharing-documentation.md @@ -0,0 +1,8 @@ +--- +title: How do I share my documentation with my team members? +description: "Use read-only seats to share documentation" +sidebar_label: 'Share documentation with teammates' +id: sharing-documentation +--- + +If you're using dbt Cloud to deploy your project, and have the [Team plan](https://www.getdbt.com/pricing/), you can have up to 5 read-only users, who will be able access the documentation for your project. diff --git a/website/docs/faqs/beta-release.md b/website/docs/faqs/Environments/beta-release.md similarity index 75% rename from website/docs/faqs/beta-release.md rename to website/docs/faqs/Environments/beta-release.md index 6cfea736780..5eef07d3510 100644 --- a/website/docs/faqs/beta-release.md +++ b/website/docs/faqs/Environments/beta-release.md @@ -1,4 +1,7 @@ --- title: What is a beta release? +description: "How to try out beta features" +sidebar_label: 'What is a beta release?' +id: beta-release --- This is a chance to try out brand-new functionality. You get to start planning for use cases that the next minor version will unlock. We get to hear from you about unexpected behavior and nasty bugs, so that the release candidate has more polish and fewer surprises. diff --git a/website/docs/faqs/Environments/diff-database-environment.md b/website/docs/faqs/Environments/diff-database-environment.md new file mode 100644 index 00000000000..49675540b80 --- /dev/null +++ b/website/docs/faqs/Environments/diff-database-environment.md @@ -0,0 +1,8 @@ +--- +title: Can I set a different connection at the environment level? +description: "Separate projects for different environments workaround" +sidebar_label: 'Set different database connections at environment level' +id: diff-database-environment +--- + +Although you're unable to set a different connection at the environment level, there is a workaround where you can have separate projects for their different environments and link them to different hostnames, while still connecting to the same repo. diff --git a/website/docs/faqs/profile-env-vars.md b/website/docs/faqs/Environments/profile-env-vars.md similarity index 56% rename from website/docs/faqs/profile-env-vars.md rename to website/docs/faqs/Environments/profile-env-vars.md index 32912361343..d9cea7946de 100644 --- a/website/docs/faqs/profile-env-vars.md +++ b/website/docs/faqs/Environments/profile-env-vars.md @@ -1,4 +1,7 @@ --- title: Can I use environment variables in my profile? +description: "Use env_var in your profile" +sidebar_label: 'Use env_var in your profile' +id: profile-env-vars --- Yes! Check out the docs on [environment variables](env_var) for more information. 
diff --git a/website/docs/faqs/Environments/profile-name.md b/website/docs/faqs/Environments/profile-name.md new file mode 100644 index 00000000000..9f9d8b0ae59 --- /dev/null +++ b/website/docs/faqs/Environments/profile-name.md @@ -0,0 +1,7 @@ +--- +title: What should I name my profile? +description: "Use company name for profile name" +sidebar_label: 'Naming your profile' +id: profile-name +--- +We typically use a company name for a profile name, and then use targets to differentiate between `dev` and `prod`. Check out the docs on [managing environments](/docs/collaborate/environments) for more information. diff --git a/website/docs/faqs/Environments/target-names.md b/website/docs/faqs/Environments/target-names.md new file mode 100644 index 00000000000..6d3cd78b8e0 --- /dev/null +++ b/website/docs/faqs/Environments/target-names.md @@ -0,0 +1,8 @@ +--- +title: What should I name my target? +description: "Using targets to clarify development and production runs" +sidebar_label: 'Naming your target' +id: target-names +--- + +We typically use targets to differentiate between development and production runs of dbt, naming the targets `dev` and `prod` respectively. Check out the docs on [managing environments](/docs/collaborate/environments) for more information. diff --git a/website/docs/faqs/Git/gitignore.md b/website/docs/faqs/Git/gitignore.md new file mode 100644 index 00000000000..fb097bb4043 --- /dev/null +++ b/website/docs/faqs/Git/gitignore.md @@ -0,0 +1,25 @@ +--- +title: Why can't I checkout a branch or create a new branch? +description: "Add or fill in gitignore file" +sidebar_label: 'Unable to checkout or create branch' +id: gitignore +--- + +If you're finding yourself unable to revert changes, check out a branch or click commit - this is usually do to your project missing a [.gitignore](https://github.com/dbt-labs/dbt-starter-project/blob/main/.gitignore) file OR your gitignore file doesn't contain the necessary content inside the folder. + +This is what causes that 'commit' git action button to display. No worries though - to fix this, you'll need to complete the following steps in order: + +1. In the Cloud IDE, add the missing .gitignore file or contents to your project. You'll want to make sure the .gitignore file includes the following: + + ```shell + target/ + dbt_modules/ + dbt_packages/ + logs/ + ``` + +2. Once you've added that, make sure to save and commit. + +3. Navigate to the same branch in your remote repository (which can be accessed directly through your git provider's web interface) and delete the logs, target, and dbt_modules/dbt_packages folders. + +4. Go back into the Cloud IDE and reclone your repository. This can be done by clicking on the green "ready" in the bottom right corner of the IDE (next to the command bar), and then clicking the orange "reclone repo" button in the pop up. diff --git a/website/docs/faqs/Git/gitlab-authentication.md b/website/docs/faqs/Git/gitlab-authentication.md new file mode 100644 index 00000000000..0debdf87873 --- /dev/null +++ b/website/docs/faqs/Git/gitlab-authentication.md @@ -0,0 +1,29 @@ +--- +title: I'm seeing a Gitlab authentication out of date error loop +description: "GitLab and dbt Cloud deploy key mismatch " +sidebar_label: 'GitLab authentication out of date' +id: gitlab-authentication +--- + +If you're seeing a 'GitLab Authentication is out of date' 500 server error page - this usually occurs when the deploy key in the repository settings in both dbt Cloud and GitLab do not match. 
+
+No worries - this is a current issue the dbt Labs team is working on, and we have a few workarounds for you to try:
+
+### 1st Workaround
+
+1. Disconnect the repo from the project in dbt Cloud.
+2. Go to GitLab and click on Settings > Repository.
+3. Under Repository Settings, remove/revoke the active dbt Cloud deploy tokens and deploy keys.
+4. Attempt to reconnect your repository via dbt Cloud.
+5. Check GitLab to make sure that the new deploy key was added.
+6. Once you've confirmed it was added, refresh dbt Cloud and try developing once again.
+
+### 2nd Workaround
+
+1. Keep the repo in the project as is -- don't disconnect.
+2. Copy the deploy key generated in dbt Cloud.
+3. Go to GitLab and click on Settings > Repository.
+4. Under Repository Settings, manually add the deploy key to your GitLab project (with the `Grant write permissions` box checked).
+5. Go back to dbt Cloud, refresh your page, and try developing again.
+
+If you've tried the workarounds above and are still experiencing this behavior, reach out to the Support team at support@getdbt.com and we'll be happy to help!
diff --git a/website/docs/faqs/Git/gitlab-selfhosted.md b/website/docs/faqs/Git/gitlab-selfhosted.md
new file mode 100644
index 00000000000..c02c363cf3a
--- /dev/null
+++ b/website/docs/faqs/Git/gitlab-selfhosted.md
@@ -0,0 +1,10 @@
+---
+title: Can self-hosted GitLab instances only be connected via dbt Cloud Enterprise plans?
+description: "Self-hosted GitLab instances plan availability"
+sidebar_label: 'Self-hosted GitLab instances available for dbt Cloud Enterprise'
+id: gitlab-selfhosted
+---
+
+Presently yes, this is only available to Enterprise users. This is because of the way you have to set up the GitLab app redirect URL for auth, which can only be customized if you're a user on an Enterprise plan.
+
+Check out our [pricing page](https://www.getdbt.com/pricing/) for more information or feel free to [contact us](https://www.getdbt.com/contact) to build your Enterprise pricing.
diff --git a/website/docs/faqs/Git/google-cloud-repo.md b/website/docs/faqs/Git/google-cloud-repo.md
new file mode 100644
index 00000000000..1e88ac9a6ed
--- /dev/null
+++ b/website/docs/faqs/Git/google-cloud-repo.md
@@ -0,0 +1,18 @@
+---
+title: How can I connect dbt to a Google Source repository?
+description: "Use the SSH URL method to connect to a Google Source repo"
+sidebar_label: 'Connect dbt to a Google Source repository'
+id: google-cloud-repo
+
+---
+
+Although we don't officially support Google Cloud as a git repository, the below workaround using the SSH URL method should help you to connect:
+
+- First, "import" your repository into dbt Cloud using the SSH URL provided to you by GCP. That will look something like:
+`ssh://drew@fishtownanalytics.com@source.developers.google.com:2022/p/dbt-integration-tests/r/drew-debug`
+
+- After importing the repo, you should see a public key generated by dbt Cloud for the repository. You'll want to copy that public key into a new SSH Key for your user here: [https://source.cloud.google.com/user/ssh_keys](https://source.cloud.google.com/user/ssh_keys)
+
+- After saving this SSH key, dbt Cloud should be able to read and write to this repo.
+
+If you've tried the workaround above and are still having issues connecting, reach out to the Support team at support@getdbt.com and we'll be happy to help!
diff --git a/website/docs/faqs/Git/managed-repo.md b/website/docs/faqs/Git/managed-repo.md
new file mode 100644
index 00000000000..c68bd5ab896
--- /dev/null
+++ b/website/docs/faqs/Git/managed-repo.md
@@ -0,0 +1,10 @@
+---
+title: How can we move our project from a managed repository to a self-hosted repository?
+description: "Email Support to request your managed repository"
+sidebar_label: 'How to request your managed repository zipfile'
+id: managed-repo
+---
+
+dbt Labs can send you your managed repository as a ZIP file in its current state for you to push up to a git provider. After that, you'd just need to switch over the [repo in your project](/docs/collaborate/git/import-a-project-by-git-url) to point to the new repository.
+
+When you're ready to do this, [contact the dbt Labs Support team](mailto:support@getdbt.com) with your request and your managed repo URL, which you can find by navigating to your project settings. To find project settings, click the gear icon in the upper right, select **Account settings**, click **Projects**, and then select your project. Under **Repository** in the project details page, you can find your managed repo URL.
diff --git a/website/docs/faqs/Git/run-on-pull.md b/website/docs/faqs/Git/run-on-pull.md
new file mode 100644
index 00000000000..3536259bb79
--- /dev/null
+++ b/website/docs/faqs/Git/run-on-pull.md
@@ -0,0 +1,15 @@
+---
+title: Why is Run on Pull request grayed out?
+description: "Use the GitHub auth method to enable Run on Pull request"
+sidebar_label: 'Run on Pull request grayed out'
+id: run-on-pull
+---
+
+If you're unable to enable Run on Pull requests, you'll want to make sure your existing repo was not added via the Deploy Key auth method.
+
+If it was added via a deploy key method, you'll want to use the [GitHub auth method](https://docs.getdbt.com/docs/cloud-installing-the-github-application) to enable CI in dbt Cloud.
+
+To enable 'Run on Pull requests', remove dbt Cloud from the Apps & Integrations on GitHub and re-integrate it via the GitHub app method.
+
+If you've tried the workaround above and are still experiencing this behavior, reach out to the Support team at support@getdbt.com and we'll be happy to help!
+
diff --git a/website/docs/faqs/jinja-whitespace.md b/website/docs/faqs/Jinja/jinja-whitespace.md
similarity index 82%
rename from website/docs/faqs/jinja-whitespace.md
rename to website/docs/faqs/Jinja/jinja-whitespace.md
index cd26398a2b9..90a7dc5d2e1 100644
--- a/website/docs/faqs/jinja-whitespace.md
+++ b/website/docs/faqs/Jinja/jinja-whitespace.md
@@ -1,6 +1,10 @@
 ---
 title: My compiled SQL has a lot of spaces and new lines, how can I get rid of it?
+description: "Managing whitespace control"
+sidebar_label: 'Compiled SQL has a lot of whitespace'
+id: jinja-whitespace
 ---
+
 This is known as "whitespace control".
 
 Use a minus sign (`-`, e.g. `{{- ... -}}`, `{%- ... %}`, `{#- ... -#}`) at the start or end of a block to strip whitespace before or after the block (more docs [here](https://jinja.palletsprojects.com/page/templates/#whitespace-control)).
 
 Check out the [tutorial on using Jinja](using-jinja#use-whitespace-control-to-tidy-up-compiled-code) for an example.
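+
+A minimal sketch of the idea (the `raw_payments` reference and column names are illustrative):
+
+```sql
+select
+    order_id,
+    {%- for payment_method in ['bank_transfer', 'credit_card'] %}
+    sum(case when payment_method = '{{ payment_method }}' then amount end) as {{ payment_method }}_amount,
+    {%- endfor %}
+    sum(amount) as total_amount
+from {{ ref('raw_payments') }}
+group by 1
+```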
diff --git a/website/docs/faqs/quoting-column-names.md b/website/docs/faqs/Jinja/quoting-column-names.md similarity index 87% rename from website/docs/faqs/quoting-column-names.md rename to website/docs/faqs/Jinja/quoting-column-names.md index ccca7566dc4..aff4a36b886 100644 --- a/website/docs/faqs/quoting-column-names.md +++ b/website/docs/faqs/Jinja/quoting-column-names.md @@ -1,10 +1,16 @@ --- title: Why do I need to quote column names in Jinja? +description: "Use quotes to pass string" +sidebar_label: 'Why quote column names in Jinja' +id: quoting-column-names --- + In the [macro example](jinja-macros#macros) we passed the column name `amount` in quotes: + ```sql {{ cents_to_dollars('amount') }} as amount_usd ``` + We have to use quotes to pass the _string_ `'amount'` to the macro. Without the quotes, the Jinja parser will look for a variable named `amount`. Since this doesn't exist, it will compile to nothing. diff --git a/website/docs/faqs/which-jinja-docs.md b/website/docs/faqs/Jinja/which-jinja-docs.md similarity index 73% rename from website/docs/faqs/which-jinja-docs.md rename to website/docs/faqs/Jinja/which-jinja-docs.md index 01c17f50954..bbb68318ffb 100644 --- a/website/docs/faqs/which-jinja-docs.md +++ b/website/docs/faqs/Jinja/which-jinja-docs.md @@ -1,9 +1,12 @@ --- title: Which docs should I use when writing Jinja or creating a macro? +description: "Useful Jinja docs" +sidebar_label: 'Useful Jinja docs' +id: which-jinja-docs --- If you are stuck on a Jinja issue, it can be confusing to know where to check for more information. We recommend you check (in order): 1. [Jinja's Template Designer Docs](https://jinja.palletsprojects.com/page/templates/): This is the best reference for most of the Jinja you'll use -2. [Our Jinja function reference](dbt-jinja-functions): This documents any additional functionality we've added to Jinja in dbt. +2. [Our Jinja function reference](/reference/dbt-jinja-functions): This documents any additional functionality we've added to Jinja in dbt. 3. [Agate's table docs](https://agate.readthedocs.io/page/api/table.html): If you're operating on the result of a query, dbt will pass it back to you as an agate table. This means that the methods you call on the table belong to the Agate library rather than Jinja or dbt. diff --git a/website/docs/faqs/available-configurations.md b/website/docs/faqs/Models/available-configurations.md similarity index 79% rename from website/docs/faqs/available-configurations.md rename to website/docs/faqs/Models/available-configurations.md index 7af6abe4c33..5d59dfd4246 100644 --- a/website/docs/faqs/available-configurations.md +++ b/website/docs/faqs/Models/available-configurations.md @@ -1,7 +1,11 @@ --- -title: What other model configurations are there? +title: What model configurations exist?
+description: "Learning about model configurations" +sidebar_label: 'Model configurations' +id: available-configurations --- You can also configure: + * [tags](resource-configs/tags) to support easy categorization and graph selection * [custom schemas](resource-configs/schema) to split your models across multiple schemas * [aliases](resource-configs/alias) if your / name should differ from the filename diff --git a/website/docs/faqs/available-materializations.md b/website/docs/faqs/Models/available-materializations.md similarity index 78% rename from website/docs/faqs/available-materializations.md rename to website/docs/faqs/Models/available-materializations.md index 76d36a171e8..f8b1530aca4 100644 --- a/website/docs/faqs/available-materializations.md +++ b/website/docs/faqs/Models/available-materializations.md @@ -1,5 +1,8 @@ --- title: What materializations are available in dbt? +description: "dbt uses 4 materializations" +sidebar_label: 'Materializations available' +id: available-materializations --- dbt ships with four materializations: `view`, `table`, `incremental` and `ephemeral`. Check out the documentation on [materializations](materializations) for more information on each of these options. diff --git a/website/docs/faqs/configurable-model-path.md b/website/docs/faqs/Models/configurable-model-path.md similarity index 82% rename from website/docs/faqs/configurable-model-path.md rename to website/docs/faqs/Models/configurable-model-path.md index 39933c88dd2..9d16bb1f05f 100644 --- a/website/docs/faqs/configurable-model-path.md +++ b/website/docs/faqs/Models/configurable-model-path.md @@ -1,5 +1,9 @@ --- title: Can I store my models in a directory other than the `models` directory in my project? +description: "How to name models directory" +sidebar_label: 'How to name models directory' +id: configurable-model-path + --- diff --git a/website/docs/faqs/Models/create-a-schema.md b/website/docs/faqs/Models/create-a-schema.md new file mode 100644 index 00000000000..4bac0cfcd53 --- /dev/null +++ b/website/docs/faqs/Models/create-a-schema.md @@ -0,0 +1,8 @@ +--- +title: Do I need to create my target schema before running dbt? +description: "Target schema is checked or created for you before running dbt" +sidebar_label: 'Target schema before running dbt' +id: create-a-schema + +--- +Nope! dbt will check if the schema exists when it runs. If the schema does not exist, dbt will create it for you. diff --git a/website/docs/faqs/create-dependencies.md b/website/docs/faqs/Models/create-dependencies.md similarity index 83% rename from website/docs/faqs/create-dependencies.md rename to website/docs/faqs/Models/create-dependencies.md index a436ec8066c..4a84e3ab7d7 100644 --- a/website/docs/faqs/create-dependencies.md +++ b/website/docs/faqs/Models/create-dependencies.md @@ -1,5 +1,9 @@ --- title: How do I create dependencies between models? +description: "Using ref function to create dependencies" +sidebar_label: 'Creating dependencies between models' +id: create-dependencies + --- When you use the `ref` [function](dbt-jinja-functions/ref), dbt automatically infers the dependencies between models. @@ -40,4 +44,4 @@ Found 2 models, 28 tests, 0 snapshots, 0 analyses, 130 macros, 0 operations, 0 s Done. 
PASS=2 WARN=0 ERROR=0 SKIP=0 TOTAL=2 ``` -To get some practice with this, we recommend you complete the [tutorial](tutorial/getting-started.md) to build your first dbt project +To learn more about building a dbt project, we recommend you complete the [Getting Started guide](/docs/get-started/getting-started/overview). diff --git a/website/docs/faqs/insert-records.md b/website/docs/faqs/Models/insert-records.md similarity index 53% rename from website/docs/faqs/insert-records.md rename to website/docs/faqs/Models/insert-records.md index d6aee7226c9..d522f6fbc85 100644 --- a/website/docs/faqs/insert-records.md +++ b/website/docs/faqs/Models/insert-records.md @@ -1,8 +1,12 @@ --- title: If models can only be `select` statements, how do I insert records? +description: "Incremental models to insert records" +sidebar_label: 'How to insert records' +id: insert-records + --- For those coming from an ETL (Extract Transform Load) paradigm, there's often a desire to write transformations as `insert` and `update` statements. In comparison, dbt will wrap your `select` query in a `create table as` statement, which can feel counter-productive. -* If you wish to use `insert` statements for perfomance reasons (i.e. to reduce data that is processed), consider [incremental models](configuring-incremental-models) -* If you wish to use `insert` statements since your source data is constantly changing (e.g. to create "Type 2 Slowly Changing Dimensions"), consider [snapshotting your source data](snapshots), and building models on top of your snaphots. +* If you wish to use `insert` statements for performance reasons (i.e. to reduce data that is processed), consider [incremental models](/docs/build/incremental-models) +* If you wish to use `insert` statements since your source data is constantly changing (e.g. to create "Type 2 Slowly Changing Dimensions"), consider [snapshotting your source data](/docs/build/sources#snapshotting-source-data-freshness), and building models on top of your snapshots. diff --git a/website/docs/faqs/model-custom-schemas.md b/website/docs/faqs/Models/model-custom-schemas.md similarity index 71% rename from website/docs/faqs/model-custom-schemas.md rename to website/docs/faqs/Models/model-custom-schemas.md index dbf3ec113ce..4a00ea9e7be 100644 --- a/website/docs/faqs/model-custom-schemas.md +++ b/website/docs/faqs/Models/model-custom-schemas.md @@ -1,7 +1,10 @@ --- -title: Can I build my models in a schema other than my target schema? +title: Can I build my models in a schema other than my target schema or split my models across multiple schemas? +description: "You can build models outside target schema" +sidebar_label: 'How to build models in schema(s) outside target schema' +id: model-custom-schemas + --- -## Or: Can I split my models across multiple schemas? Yes! Use the [schema](reference/resource-configs/schema.md) configuration in your `dbt_project.yml` file, or use a `config` block: @@ -20,7 +23,6 @@ models: - ```sql diff --git a/website/docs/faqs/Models/reference-models-in-another-project.md b/website/docs/faqs/Models/reference-models-in-another-project.md new file mode 100644 index 00000000000..0a06b04e7bd --- /dev/null +++ b/website/docs/faqs/Models/reference-models-in-another-project.md @@ -0,0 +1,11 @@ +--- +title: How can I reference models or macros in another project?
+description: "Use packages to add another project to your dbt project" +sidebar_label: 'Reference models or macros in another project' +id: reference-models-in-another-project + +--- + +You can use [packages]/docs/build/packages) to add another project to your dbt +project, including other projects you've created. Check out the [docs](/docs/build/packages) +for more information! diff --git a/website/docs/faqs/removing-deleted-models.md b/website/docs/faqs/Models/removing-deleted-models.md similarity index 76% rename from website/docs/faqs/removing-deleted-models.md rename to website/docs/faqs/Models/removing-deleted-models.md index 150d10237a1..05682465dc0 100644 --- a/website/docs/faqs/removing-deleted-models.md +++ b/website/docs/faqs/Models/removing-deleted-models.md @@ -1,5 +1,9 @@ --- title: How do I remove deleted models from my data warehouse? +description: "Remove deleted models directly in your data warehouse" +sidebar_label: 'Removing deleted models from your warehouse' +id: removing-deleted-models + --- If you delete a model from your dbt project, dbt does not automatically drop the relation from your schema. This means that you can end up with extra objects in schemas that dbt creates, which can be confusing to other users. diff --git a/website/docs/faqs/run-downtime.md b/website/docs/faqs/Models/run-downtime.md similarity index 59% rename from website/docs/faqs/run-downtime.md rename to website/docs/faqs/Models/run-downtime.md index 5b976bde258..4c19511061b 100644 --- a/website/docs/faqs/run-downtime.md +++ b/website/docs/faqs/Models/run-downtime.md @@ -1,6 +1,10 @@ --- title: If I rerun dbt, will there be any downtime as models are rebuilt? +description: "No downtime when re-building models in dbt " +sidebar_label: 'Re-building models in dbt atomically' +id: run-downtime + --- Nope! The SQL that dbt generates behind the scenes ensures that any relations are replaced atomically (i.e. your business users won't experience any downtime). -The implementation of this varies on each warehouse, check out the [logs](faqs/checking-logs.md) to see the SQL dbt is executing. +The implementation of this varies on each warehouse, check out the [logs](/docs/faqs/Runs/checking-logs) to see the SQL dbt is executing. diff --git a/website/docs/faqs/source-quotes.md b/website/docs/faqs/Models/source-quotes.md similarity index 86% rename from website/docs/faqs/source-quotes.md rename to website/docs/faqs/Models/source-quotes.md index 5b010dc1e70..da5a278669b 100644 --- a/website/docs/faqs/source-quotes.md +++ b/website/docs/faqs/Models/source-quotes.md @@ -1,5 +1,9 @@ --- title: I need to use quotes to select from my source, what should I do? +description: "Use quoting property to quote values" +sidebar_label: 'How to quote values' +id: source-quotes + --- This is reasonably common on Snowflake in particular. diff --git a/website/docs/faqs/specifying-column-types.md b/website/docs/faqs/Models/specifying-column-types.md similarity index 82% rename from website/docs/faqs/specifying-column-types.md rename to website/docs/faqs/Models/specifying-column-types.md index 034df62abcb..1adaf30fdce 100644 --- a/website/docs/faqs/specifying-column-types.md +++ b/website/docs/faqs/Models/specifying-column-types.md @@ -1,5 +1,9 @@ --- title: How do I specify column types? 
+description: "Specify column types in models" +sidebar_label: 'Specify column types in models' +id: specifying-column-types + --- Simply cast the column to the correct type in your model: @@ -33,6 +37,7 @@ create table dbt_alice.my_table as ( So long as your model queries return the correct column type, the table you create will also have the correct column type. To define additional column options: -* Rather than enforcing uniqueness and not-null constraints on your column, use dbt's [testing](building-a-dbt-project/tests) functionality to check that your assertions about your model hold true. + +* Rather than enforcing uniqueness and not-null constraints on your column, use dbt's [testing](/docs/build/tests) functionality to check that your assertions about your model hold true. * Rather than creating default values for a column, use SQL to express defaults (e.g. `coalesce(updated_at, current_timestamp()) as updated_at`) * In edge-cases where you _do_ need to alter a column (e.g. column-level encoding on Redshift), consider implementing this via a [post-hook](pre-hook-post-hook). diff --git a/website/docs/faqs/sql-dialect.md b/website/docs/faqs/Models/sql-dialect.md similarity index 86% rename from website/docs/faqs/sql-dialect.md rename to website/docs/faqs/Models/sql-dialect.md index a8885a70fda..df7168a1b60 100644 --- a/website/docs/faqs/sql-dialect.md +++ b/website/docs/faqs/Models/sql-dialect.md @@ -1,14 +1,15 @@ --- -title: Which SQL dialect should I write my models in? +title: Which SQL dialect should I write my models in? Or which SQL dialect does dbt use? +description: "Use SQL dialect of your own database" +sidebar_label: 'Which SQL dialect to use?' +id: sql-dialect --- -Or: -### Which SQL dialect does dbt use? dbt can feel like magic, but it isn't actually magic. Under the hood, it's running SQL in your own warehouse — your data is not processed outside of your warehouse. As such, your models should just use the **SQL dialect of your own database**. Then, when dbt wraps your `select` statements in the appropriate or , it will use the correct DML for your warehouse — all of this logic is written in to dbt. -You can find more information about the databases, platforms, and query engines that dbt supports in the [Supported Adapters](available-adapters) docs. +You can find more information about the databases, platforms, and query engines that dbt supports in the [Supported Data Platforms](supported-data-platforms) docs. Want to go a little deeper on how this works? Consider a snippet of SQL that works on each warehouse: diff --git a/website/docs/faqs/unique-model-names.md b/website/docs/faqs/Models/unique-model-names.md similarity index 66% rename from website/docs/faqs/unique-model-names.md rename to website/docs/faqs/Models/unique-model-names.md index f18bcd81fc5..b1a523427c0 100644 --- a/website/docs/faqs/unique-model-names.md +++ b/website/docs/faqs/Models/unique-model-names.md @@ -1,6 +1,11 @@ --- title: Do model names need to be unique? +description: "Unique model names to build dependencies" +sidebar_label: 'Model names need to be unique' +id: unique-model-names + --- + Yes! To build dependencies between models, you need to use the `ref` function. The `ref` function only takes one argument — the model name (i.e. the filename). As a result, these model names need to be unique, _even if they are in distinct folders_. -Often, this question comes up because users want to give two models the same name in their warehouse, splitting them across separate schemas (e.g. 
`stripe.users` and `app.users`). Checkout the docs on [custom aliases](using-custom-aliases) and [custom schemas](using-custom-schemas) to achieve this. +Often, this question comes up because users want to give two models the same name in their warehouse, splitting them across separate schemas (e.g. `stripe.users` and `app.users`). Check out the docs on [custom aliases](/docs/build/custom-aliases) and [custom schemas](/docs/build/custom-schemas) to achieve this. diff --git a/website/docs/faqs/Project/dbt-source-freshness.md b/website/docs/faqs/Project/dbt-source-freshness.md new file mode 100644 index 00000000000..496d5061eab --- /dev/null +++ b/website/docs/faqs/Project/dbt-source-freshness.md @@ -0,0 +1,14 @@ +--- +title: Are the results of freshness stored anywhere? +description: "How to access Source Freshness results" +sidebar_label: 'Accessing Source Freshness results' +id: dbt-source-freshness + +--- +Yes! + +The `dbt source freshness` command will output a pass/warning/error status for each selected table in the freshness snapshot. + +Additionally, dbt will write the freshness results to a file in the `target/` directory called `sources.json` by default. You can also override this destination; use the `-o` flag with the `dbt source freshness` command. + +After enabling source freshness within a job, configure [Artifacts](/docs/dbt-cloud/using-dbt-cloud/artifacts) in your **Project Details** page, which you can find by clicking the gear icon and then selecting **Account settings**. You can see the current status for source freshness by clicking **View Sources** in the job page. diff --git a/website/docs/faqs/debugging-jinja.md b/website/docs/faqs/Project/debugging-jinja.md similarity index 72% rename from website/docs/faqs/debugging-jinja.md rename to website/docs/faqs/Project/debugging-jinja.md index 8eadf722976..bebc76b4f99 100644 --- a/website/docs/faqs/debugging-jinja.md +++ b/website/docs/faqs/Project/debugging-jinja.md @@ -1,5 +1,9 @@ --- title: How do I debug my Jinja? +description: "Using target folder or log function to debug Jinja" +sidebar_label: 'Debugging Jinja' +id: debugging-jinja + --- You should get familiar with checking the compiled SQL in `target/compiled/<your_project>/` and the logs in `logs/dbt.log` to see what dbt is running behind the scenes. diff --git a/website/docs/faqs/define-a-column-type.md b/website/docs/faqs/Project/define-a-column-type.md similarity index 87% rename from website/docs/faqs/define-a-column-type.md rename to website/docs/faqs/Project/define-a-column-type.md index 13f2844bc6c..0f15f9e6892 100644 --- a/website/docs/faqs/define-a-column-type.md +++ b/website/docs/faqs/Project/define-a-column-type.md @@ -1,5 +1,9 @@ --- title: How do I define a column type? +description: "Using cast function to define a column type" +sidebar_label: 'How to define a column type' +id: define-a-column-type + --- Your warehouse's SQL engine automatically assigns a [datatype](https://www.w3schools.com/sql/sql_datatypes.asp) to every column, whether it's found in a source or model. To force SQL to treat a column as a certain datatype, use `cast` functions: @@ -16,7 +20,7 @@ from {{ ref('stg_orders') }} -Many modern data warehouses now support `::` syntax as a shorthand for `cast(<column> as <data_type>)`. +Many modern data warehouses now support `::` syntax as a shorthand for `cast(<column> as <data_type>)`.
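As a quick illustration of that shorthand (a hypothetical model, not from the FAQ itself), the two expressions below compile to the same cast on warehouses that support `::`:

```sql
select
    -- the standard cast() function
    cast(order_id as integer) as order_id,
    -- the `::` shorthand for the same cast
    total_amount::numeric(16, 2) as total_amount
from {{ ref('stg_orders') }}
```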
diff --git a/website/docs/faqs/Project/docs-for-multiple-projects.md b/website/docs/faqs/Project/docs-for-multiple-projects.md new file mode 100644 index 00000000000..b7aa1452b39 --- /dev/null +++ b/website/docs/faqs/Project/docs-for-multiple-projects.md @@ -0,0 +1,11 @@ +--- +title: Can I render docs for multiple projects? +description: "Using packages to render docs for multiple projects" +sidebar_label: 'Render docs for multiple projects' +id: docs-for-multiple-projects + +--- + +Yes! To do this, you'll need to create a "super project" that lists each project as a dependent [package](/docs/build/packages) in a `packages.yml` file. Then run `dbt deps` to install the projects as packages, prior to running `dbt docs generate`. + +If you are going down the route of multiple projects, be sure to check out our advice [1](https://discourse.getdbt.com/t/should-i-have-an-organisation-wide-project-a-monorepo-or-should-each-work-flow-have-their-own/666) [2](https://discourse.getdbt.com/t/how-to-configure-your-dbt-repository-one-or-many/2121) on the topic. diff --git a/website/docs/faqs/example-projects.md b/website/docs/faqs/Project/example-projects.md similarity index 76% rename from website/docs/faqs/example-projects.md rename to website/docs/faqs/Project/example-projects.md index 45aed62309b..d4bebc7206d 100644 --- a/website/docs/faqs/example-projects.md +++ b/website/docs/faqs/Project/example-projects.md @@ -1,13 +1,17 @@ --- title: Are there any example dbt projects? +description: "List of example dbt projects" +sidebar_label: 'dbt project examples' +id: example-projects + --- Yes! -* **Getting Started Tutorial:** You can build your own example dbt project in the [Getting Started Tutorial](tutorial/getting-started.md) +* **Getting Started Tutorial:** You can build your own example dbt project in the [Getting Started guide](/docs/get-started/getting-started/overview) * **Jaffle Shop:** A demonstration project (closely related to the tutorial) for a fictional ecommerce store ([source code](https://github.com/dbt-labs/jaffle_shop)) * **MRR Playbook:** A demonstration project that models subscription revenue ([source code](https://github.com/dbt-labs/mrr-playbook), [docs](https://www.getdbt.com/mrr-playbook/#!/overview)) * **Attribution Playbook:** A demonstration project that models marketing attribution ([source code](https://github.com/dbt-labs/attribution-playbook), [docs](https://www.getdbt.com/attribution-playbook/#!/overview)) * **GitLab:** Gitlab's internal dbt project is open source and is a great example of how to use dbt at scale ([source code](https://gitlab.com/gitlab-data/analytics/-/tree/master/transform/snowflake-dbt), [docs](https://dbt.gitlabdata.com/)) -If you have an example project to add to this list, suggest an [edit](https://github.com/dbt-labs/docs.getdbt.com/edit/master/website/docs/faqs/example-projects.md). +If you have an example project to add to this list, suggest an edit by clicking **Edit this page** below. diff --git a/website/docs/faqs/exclude-table-from-freshness.md b/website/docs/faqs/Project/exclude-table-from-freshness.md similarity index 82% rename from website/docs/faqs/exclude-table-from-freshness.md rename to website/docs/faqs/Project/exclude-table-from-freshness.md index 17a21940e44..c8aacce2de9 100644 --- a/website/docs/faqs/exclude-table-from-freshness.md +++ b/website/docs/faqs/Project/exclude-table-from-freshness.md @@ -1,5 +1,9 @@ --- title: How do I exclude a table from a freshness snapshot? 
+description: "Use null to exclude a table from a freshness snapshot" +sidebar_label: 'Exclude a table from freshness snapshot ' +id: exclude-table-from-freshness + --- Some tables in a data source may be updated infrequently. If you've set a `freshness` property at the source level, this is likely to fail checks. diff --git a/website/docs/faqs/Project/extract-load-support.md b/website/docs/faqs/Project/extract-load-support.md new file mode 100644 index 00000000000..f5fafab68b1 --- /dev/null +++ b/website/docs/faqs/Project/extract-load-support.md @@ -0,0 +1,7 @@ +--- +title: Does dbt offer extract and load functionality? +description: "dbt is a transformation tool" +sidebar_label: 'Does dbt offer extract and load functionality?' +id: transformation-tool +--- +dbt is a transformation tool. It is _not_ designed for extract or load functionality, and dbt Labs strongly recommends against using dbt in this way. Support is not provided for extract or load functionality. diff --git a/website/docs/faqs/multiple-resource-yml-files.md b/website/docs/faqs/Project/multiple-resource-yml-files.md similarity index 77% rename from website/docs/faqs/multiple-resource-yml-files.md rename to website/docs/faqs/Project/multiple-resource-yml-files.md index 3f736a97004..06738aeae65 100644 --- a/website/docs/faqs/multiple-resource-yml-files.md +++ b/website/docs/faqs/Project/multiple-resource-yml-files.md @@ -1,5 +1,9 @@ --- title: Should I use separate files to declare resource properties, or one large file? +description: "Structuring your files and project" +sidebar_label: 'Ways to structure your project/files' +id: multiple-resource-yml-files + --- It's up to you: - Some folks find it useful to have one file per model (or source / snapshot / seed etc) diff --git a/website/docs/faqs/properties-not-in-config.md b/website/docs/faqs/Project/properties-not-in-config.md similarity index 82% rename from website/docs/faqs/properties-not-in-config.md rename to website/docs/faqs/Project/properties-not-in-config.md index 9ef7ef86bf9..9e334c55ce3 100644 --- a/website/docs/faqs/properties-not-in-config.md +++ b/website/docs/faqs/Project/properties-not-in-config.md @@ -1,5 +1,9 @@ --- title: Can I add tests and descriptions in a config block? +description: "Types of properties defined in config blocks" +sidebar_label: 'Types of properties defined in config blocks' +id: properties-not-in-config + --- In v0.21, dbt added the ability to define node configs in `.yml` files, in addition to `config()` blocks and `dbt_project.yml`. But the reverse isn't always true: there are some things in `.yml` files that can _only_ be defined there. @@ -13,7 +17,7 @@ Certain properties are special, because: These properties are: - [`description`](resource-properties/description) - [`tests`](resource-properties/tests) -- [`docs`](resource-properties/docs) +- [`docs`](/reference/resource-configs/docs) - `columns` - [`quote`](resource-properties/quote) - [`source` properties](source-properties) (e.g. `loaded_at_field`, `freshness`) diff --git a/website/docs/faqs/resource-yml-name.md b/website/docs/faqs/Project/resource-yml-name.md similarity index 69% rename from website/docs/faqs/resource-yml-name.md rename to website/docs/faqs/Project/resource-yml-name.md index 79baf07b5ca..8a6ebe96134 100644 --- a/website/docs/faqs/resource-yml-name.md +++ b/website/docs/faqs/Project/resource-yml-name.md @@ -1,9 +1,13 @@ --- title: If I can name these files whatever I'd like, what should I name them? 
+description: "Recommendations on how to name your files" +sidebar_label: 'How to name your files' +id: resource-yml-name + --- It's up to you! Here's a few options: - Default to the existing terminology: `schema.yml` (though this does make it hard to find the right file over time) - Use the same name as your directory (assuming you're using sensible names for your directories) - If you test and document one model (or seed, snapshot, macro etc.) per file, you can give it the same name as the model (or seed, snapshot, macro etc.) -Choose what works for your team. We have more recommendations in our guide on [structuring dbt project](https://discourse.getdbt.com/t/how-we-structure-our-dbt-projects/355). +Choose what works for your team. We have more recommendations in our guide on [structuring dbt projects](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview). diff --git a/website/docs/faqs/schema-yml-name.md b/website/docs/faqs/Project/schema-yml-name.md similarity index 78% rename from website/docs/faqs/schema-yml-name.md rename to website/docs/faqs/Project/schema-yml-name.md index e7563caaba2..9f8683647ca 100644 --- a/website/docs/faqs/schema-yml-name.md +++ b/website/docs/faqs/Project/schema-yml-name.md @@ -1,5 +1,9 @@ --- title: Does my `.yml` file containing tests and descriptions need to be named `schema.yml`? +description: "Naming your Tests and Descriptions files" +sidebar_label: 'How to name your tests and descriptions files' +id: schema-yml-name + --- No! You can name this file whatever you want (including `whatever_you_want.yml`), so long as: * The file is in your `models/` directory¹ diff --git a/website/docs/faqs/separate-profile.md b/website/docs/faqs/Project/separate-profile.md similarity index 68% rename from website/docs/faqs/separate-profile.md rename to website/docs/faqs/Project/separate-profile.md index 0cdda094f0a..b2a6f521dc3 100644 --- a/website/docs/faqs/separate-profile.md +++ b/website/docs/faqs/Project/separate-profile.md @@ -1,5 +1,9 @@ --- title: Why are profiles stored outside of my project? +description: "Profiles stored outside version control" +sidebar_label: 'Profiles stored outside project' +id: separate-profile + --- Profiles are stored separately to dbt projects to avoid checking credentials into version control. Database credentials are extremely sensitive information and should **never be checked into version control**. diff --git a/website/docs/faqs/source-has-bad-name.md b/website/docs/faqs/Project/source-has-bad-name.md similarity index 83% rename from website/docs/faqs/source-has-bad-name.md rename to website/docs/faqs/Project/source-has-bad-name.md index e091ee01421..10fb2f03d78 100644 --- a/website/docs/faqs/source-has-bad-name.md +++ b/website/docs/faqs/Project/source-has-bad-name.md @@ -1,5 +1,9 @@ --- title: What if my source is in a poorly named schema or table? +description: "Use schema and identifier properities to define names" +sidebar_label: 'Source is in a poorly-named scheme or table' +id: source-has-bad-name + --- By default, dbt will use the `name:` parameters to construct the source reference. 
diff --git a/website/docs/faqs/source-in-different-database.md b/website/docs/faqs/Project/source-in-different-database.md similarity index 67% rename from website/docs/faqs/source-in-different-database.md rename to website/docs/faqs/Project/source-in-different-database.md index 475aa41de2b..cdfa3c62b76 100644 --- a/website/docs/faqs/source-in-different-database.md +++ b/website/docs/faqs/Project/source-in-different-database.md @@ -1,5 +1,9 @@ --- title: What if my source is in a different database to my target database? +description: "Use database property to define source in a different database" +sidebar_label: 'Source is in a different database to target database' +id: source-in-different-database + --- Use the [`database` property](resource-properties/database) to define the database that the source is in. diff --git a/website/docs/faqs/structure-a-project.md b/website/docs/faqs/Project/structure-a-project.md similarity index 69% rename from website/docs/faqs/structure-a-project.md rename to website/docs/faqs/Project/structure-a-project.md index d1e2c240cf7..e627c5e4793 100644 --- a/website/docs/faqs/structure-a-project.md +++ b/website/docs/faqs/Project/structure-a-project.md @@ -1,5 +1,9 @@ --- title: How should I structure my project? +description: "Recommendations on how to structure project" +sidebar_label: 'How to structure project' +id: structure-a-project + --- There's no one best way to structure a project! Every organization is unique. diff --git a/website/docs/faqs/which-materialization.md b/website/docs/faqs/Project/which-materialization.md similarity index 70% rename from website/docs/faqs/which-materialization.md rename to website/docs/faqs/Project/which-materialization.md index b74547c9416..32599a61746 100644 --- a/website/docs/faqs/which-materialization.md +++ b/website/docs/faqs/Project/which-materialization.md @@ -1,5 +1,9 @@ --- title: Which materialization should I use for my model? +description: "Recommendations on materializations to use for models" +sidebar_label: 'What materializations to use' +id: which-materialization + --- Start out with views, and then change models to tables when required for performance reasons (i.e. downstream queries have slowed). diff --git a/website/docs/faqs/which-schema.md b/website/docs/faqs/Project/which-schema.md similarity index 69% rename from website/docs/faqs/which-schema.md rename to website/docs/faqs/Project/which-schema.md index 0378410b19f..f0634ac8c85 100644 --- a/website/docs/faqs/which-schema.md +++ b/website/docs/faqs/Project/which-schema.md @@ -1,10 +1,14 @@ --- title: How did dbt choose which schema to build my models in? +description: "You can change your target schema in your Environment Settings." +sidebar_label: 'dbt builds models in your target schema' +id: which-schema + --- By default, dbt builds models in your target schema. To change your target schema: * If you're developing in **dbt Cloud**, these are set for each user when you first use a development environment. * If you're developing with the **dbt CLI**, this is the `schema:` parameter in your `profiles.yml` file. -If you wish to split your models across multiple schemas, check out the docs on [using custom schemas](using-custom-schemas). +If you wish to split your models across multiple schemas, check out the docs on [using custom schemas](/docs/build/custom-schemas). -Note: on BigQuery, `dataset` is used interchangeably with `schema`. \ No newline at end of file +Note: on BigQuery, `dataset` is used interchangeably with `schema`.
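For dbt CLI users, here's a minimal sketch of where that `schema:` parameter lives in `profiles.yml` (the profile name and connection details are hypothetical):

```yml
jaffle_shop:
  target: dev
  outputs:
    dev:
      type: postgres
      host: localhost
      user: alice
      password: "{{ env_var('DBT_PASSWORD') }}"
      port: 5432
      dbname: analytics
      schema: dbt_alice  # dbt builds models into this schema by default
```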
diff --git a/website/docs/faqs/why-not-write-dml.md b/website/docs/faqs/Project/why-not-write-dml.md similarity index 86% rename from website/docs/faqs/why-not-write-dml.md rename to website/docs/faqs/Project/why-not-write-dml.md index a553460de57..cf778c2086c 100644 --- a/website/docs/faqs/why-not-write-dml.md +++ b/website/docs/faqs/Project/why-not-write-dml.md @@ -1,14 +1,19 @@ --- -title: Why can't I just write DML in my transformations? +title: "Why can't I just write DML in my transformations?" +description: "Using SQL in your transformations instead of DML." +sidebar_label: 'Why not write in DML' +id: why-not-write-dml + --- -Or: -### I'm already familiar with DML, and can write these statements manually, why should I use dbt to do this? #### `select` statements make transformations accessible + More people know how to write `select` statements than DML, making the transformation layer accessible to more people! -#### Writing good DML is hard. +#### Writing good DML is hard + If you write the DDL/DML yourself you can end up getting yourself tangled in problems like: + * What happens if the table already exists? Or this table already exists as a view, but now I want it to be a table? * What if the schema already exists? Or, should I check if the schema already exists? * How do I replace a model atomically (such that there's no downtime for someone querying the table)? @@ -18,9 +23,11 @@ If you write the / DML yourself you can end up getting yoursel Each of these problems _can_ be solved, but they are unlikely to be the best use of your time. #### dbt does more than generate SQL + You can test your models, generate documentation, create snapshots, and more! #### You reduce your vendor lock-in + SQL dialects tend to diverge the most in DML and DDL (rather than in `select` statements) — check out the example [here](sql-dialect). Writing less SQL can make a migration to a new database technology easier. If you do need to write custom DML, there are ways to do this in dbt using [custom materializations](creating-new-materializations). diff --git a/website/docs/faqs/why-so-many-macros.md b/website/docs/faqs/Project/why-so-many-macros.md similarity index 77% rename from website/docs/faqs/why-so-many-macros.md rename to website/docs/faqs/Project/why-so-many-macros.md index 574c86a0899..5687d119cf8 100644 --- a/website/docs/faqs/why-so-many-macros.md +++ b/website/docs/faqs/Project/why-so-many-macros.md @@ -1,10 +1,14 @@ --- title: Why does my dbt output have so many macros in it? +description: "Your dbt project includes many macros." +sidebar_label: 'dbt project has a lot of macros' +id: why-so-many-macros + --- The output of a dbt run counts over 100 macros in your project! -``` +```shell $ dbt run Running with dbt=0.17.0 Found 1 model, 0 tests, 0 snapshots, 0 analyses, 138 macros, 0 operations, 0 seed files, 0 sources
+sidebar_label: 'Why does yml file start with version 2' +id: why-version-2 + --- Once upon a time, the structure of these `.yml` files was very different (s/o to anyone who was using dbt back then!). Adding `version: 2` allowed us to make this structure more extensible. diff --git a/website/docs/faqs/Project/yaml-file-extension.md b/website/docs/faqs/Project/yaml-file-extension.md new file mode 100644 index 00000000000..6ba113db52c --- /dev/null +++ b/website/docs/faqs/Project/yaml-file-extension.md @@ -0,0 +1,9 @@ +--- +title: Can I use a yaml file extension? +description: "dbt will only search for files with a `.yml` file extension" +sidebar_label: '.yml file extension search' +id: yaml-file-extension + +--- + +No. At present, dbt will only search for files with a `.yml` file extension. In a future release of dbt, dbt will also search for files with a `.yaml` file extension. diff --git a/website/docs/faqs/Runs/checking-logs.md b/website/docs/faqs/Runs/checking-logs.md new file mode 100644 index 00000000000..dbfdb6806a1 --- /dev/null +++ b/website/docs/faqs/Runs/checking-logs.md @@ -0,0 +1,16 @@ +--- +title: How can I see the SQL that dbt is running? +description: "Review logs to check the sql dbt is running" +sidebar_label: 'Reviewing sql that dbt runs' +id: checking-logs + +--- + +To check out the SQL that dbt is running, you can look in: + +* dbt Cloud: + * Within the run output, click on a model name, and then select "Details" +* dbt CLI: + * The `target/compiled/` directory for compiled `select` statements + * The `target/run/` directory for compiled `create` statements + * The `logs/dbt.log` file for verbose logging. diff --git a/website/docs/faqs/failed-prod-run.md b/website/docs/faqs/Runs/failed-prod-run.md similarity index 67% rename from website/docs/faqs/failed-prod-run.md rename to website/docs/faqs/Runs/failed-prod-run.md index 575419a20bd..23cabad7692 100644 --- a/website/docs/faqs/failed-prod-run.md +++ b/website/docs/faqs/Runs/failed-prod-run.md @@ -1,5 +1,9 @@ --- title: What happens if one of my runs fails? +description: "Set up notifications to debug failed runs" +sidebar_label: 'Notifications to debug failed runs' +id: failed-prod-run + --- If you're using dbt Cloud, we recommend setting up email and Slack notifications (`Account Settings > Notifications`) for any failed runs. Then, debug these runs the same way you would debug any runs in development. diff --git a/website/docs/faqs/Runs/failed-tests.md b/website/docs/faqs/Runs/failed-tests.md new file mode 100644 index 00000000000..bfee565ef61 --- /dev/null +++ b/website/docs/faqs/Runs/failed-tests.md @@ -0,0 +1,18 @@ +--- +title: One of my tests failed, how can I debug it? +description: "You can debug failed tests by finding the SQL" +sidebar_label: 'Debug failed tests' +id: failed-tests + +--- +To debug a failing test, find the SQL that dbt ran by: + +* dbt Cloud: + * Within the test output, click on the failed test, and then select "Details" + +* dbt CLI: + * Open the file path returned as part of the error message. + * Navigate to the `target/compiled/schema_tests` directory for all compiled test queries + +Copy the SQL into a query editor (in dbt Cloud, you can paste it into a new `Statement`), and run the query to find the records that failed. 
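For instance, the compiled SQL for a `not_null` test looks roughly like the sketch below (the schema, model, and column names are hypothetical); every row the query returns is a failing record:

```sql
-- a sketch of a compiled not_null test found under target/compiled/
select *
from analytics.customers
where customer_id is null
```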
+ diff --git a/website/docs/faqs/run-downstream-of-seed.md b/website/docs/faqs/Runs/run-downstream-of-seed.md similarity index 67% rename from website/docs/faqs/run-downstream-of-seed.md rename to website/docs/faqs/Runs/run-downstream-of-seed.md index f8b288aa2e2..bbf295f403d 100644 --- a/website/docs/faqs/run-downstream-of-seed.md +++ b/website/docs/faqs/Runs/run-downstream-of-seed.md @@ -1,11 +1,15 @@ --- title: How do I run models downstream of a seed? +description: "You run models downstream using model selection syntax" +sidebar_label: 'Run models downstream of a seed' +id: run-downstream-of-seed + --- You can run models downstream of a seed using the [model selection syntax](node-selection/syntax), and treating the seed like a model. For example, the following would run all models downstream of a seed named `country_codes`: -``` +```shell $ dbt run --select country_codes+ ``` diff --git a/website/docs/faqs/run-one-model.md b/website/docs/faqs/Runs/run-one-model.md similarity index 69% rename from website/docs/faqs/run-one-model.md rename to website/docs/faqs/Runs/run-one-model.md index 15747c60849..dfbb54f2087 100644 --- a/website/docs/faqs/run-one-model.md +++ b/website/docs/faqs/Runs/run-one-model.md @@ -1,11 +1,14 @@ --- title: How do I run one model at a time? +description: "Use select flags to run one model at a time" +sidebar_label: 'Run one model at a time' +id: run-one-model + --- To run one model, use the `--select` flag (or `-s` flag), followed by the name of the model: - -``` +```shell $ dbt run --select customers ``` diff --git a/website/docs/faqs/run-one-snapshot.md b/website/docs/faqs/Runs/run-one-snapshot.md similarity index 68% rename from website/docs/faqs/run-one-snapshot.md rename to website/docs/faqs/Runs/run-one-snapshot.md index 31c7d7e0e11..7029f3c02da 100644 --- a/website/docs/faqs/run-one-snapshot.md +++ b/website/docs/faqs/Runs/run-one-snapshot.md @@ -1,11 +1,14 @@ --- title: How do I run one snapshot at a time? +description: "Use select flags to run one snapshot at a time" +sidebar_label: 'Run one snapshot at a time' +id: run-one-snapshot + --- To run one snapshot, use the `--select` flag, followed by the name of the snapshot: - -``` +```shell $ dbt snapshot --select order_snapshot ``` diff --git a/website/docs/faqs/running-models-downstream-of-source.md b/website/docs/faqs/Runs/running-models-downstream-of-source.md similarity index 70% rename from website/docs/faqs/running-models-downstream-of-source.md rename to website/docs/faqs/Runs/running-models-downstream-of-source.md index e8fca9c64c8..9a56dd81294 100644 --- a/website/docs/faqs/running-models-downstream-of-source.md +++ b/website/docs/faqs/Runs/running-models-downstream-of-source.md @@ -1,16 +1,20 @@ --- title: How do I run models downstream of one source?
+description: "Use source selector to run models downstream of a source" +sidebar_label: 'Run models downstream of one source' +id: running-model-downstream-of-source + --- To run models downstream of a source, use the `source:` selector: -``` +```shell $ dbt run --select source:jaffle_shop+ ``` (You can also use the `-s` shorthand here instead of `--select`) To run models downstream of one source : -``` +```shell $ dbt run --select source:jaffle_shop.orders+ ``` diff --git a/website/docs/faqs/snapshot-frequency.md b/website/docs/faqs/Runs/snapshot-frequency.md similarity index 79% rename from website/docs/faqs/snapshot-frequency.md rename to website/docs/faqs/Runs/snapshot-frequency.md index 3f78584effc..9a964b94cc7 100644 --- a/website/docs/faqs/snapshot-frequency.md +++ b/website/docs/faqs/Runs/snapshot-frequency.md @@ -1,5 +1,9 @@ --- title: How often should I run the snapshot command? +description: "Snapshot command intended to run on hourly/daily schedule " +sidebar_label: 'Snapshot command schedule' +id: snapshot-frequency + --- Snapshots are a batch-based approach to [change data capture](https://en.wikipedia.org/wiki/Change_data_capture). The `dbt snapshot` command must be run on a schedule to ensure that changes to tables are actually recorded! While individual use-cases may vary, snapshots are intended to be run between hourly and daily. If you find yourself snapshotting more frequently than that, consider if there isn't a more appropriate way to capture changes in your source data tables. diff --git a/website/docs/faqs/build-one-seed.md b/website/docs/faqs/Seeds/build-one-seed.md similarity index 73% rename from website/docs/faqs/build-one-seed.md rename to website/docs/faqs/Seeds/build-one-seed.md index bdd80651a07..974258cbe19 100644 --- a/website/docs/faqs/build-one-seed.md +++ b/website/docs/faqs/Seeds/build-one-seed.md @@ -1,11 +1,18 @@ --- title: How do I build one seed at a time? +description: "Use select flag to build one seed at a time" +sidebar_label: "Build one seed at a time" +id: build-one-seed --- + As of v0.16.0, you can use a `--select` option with the `dbt seed` command, like so: -``` +```shell + $ dbt seed --select country_codes + ``` + There is also an `--exclude` option. Check out more in the [model selection syntax](node-selection/syntax) documentation. diff --git a/website/docs/faqs/full-refresh-seed.md b/website/docs/faqs/Seeds/full-refresh-seed.md similarity index 93% rename from website/docs/faqs/full-refresh-seed.md rename to website/docs/faqs/Seeds/full-refresh-seed.md index 86b791f4e58..b59f7a8f60d 100644 --- a/website/docs/faqs/full-refresh-seed.md +++ b/website/docs/faqs/Seeds/full-refresh-seed.md @@ -1,5 +1,9 @@ --- title: The columns of my seed changed, and now I get an error when running the `seed` command, what should I do? +description: "Rerun the command with a `--full-refresh` flag" +sidebar_label: 'debug error when columns of seed changes' +id: full-refresh-seed + --- If you changed the columns of your seed, you may get a `Database Error`: @@ -12,7 +16,7 @@ If you changed the columns of your seed, you may get a `Database Error`: }> -``` +```shell $ dbt seed Running with dbt=0.16.0-rc2 Found 0 models, 0 tests, 0 snapshots, 0 analyses, 130 macros, 0 operations, 1 seed file, 0 sources @@ -37,7 +41,7 @@ Done. PASS=0 WARN=0 ERROR=1 SKIP=0 TOTAL=1 -``` +```shell $ dbt seed Running with dbt=0.16.0-rc2 Found 0 models, 0 tests, 0 snapshots, 0 analyses, 149 macros, 0 operations, 1 seed file, 0 sources @@ -62,7 +66,8 @@ Done. 
PASS=0 WARN=0 ERROR=1 SKIP=0 TOTAL=1 In this case, you should rerun the command with a `--full-refresh` flag, like so: -``` + +```shell dbt seed --full-refresh ``` diff --git a/website/docs/faqs/leading-zeros-in-seed.md b/website/docs/faqs/Seeds/leading-zeros-in-seed.md similarity index 75% rename from website/docs/faqs/leading-zeros-in-seed.md rename to website/docs/faqs/Seeds/leading-zeros-in-seed.md index c59e663c9cc..f2ffeb05f3f 100644 --- a/website/docs/faqs/leading-zeros-in-seed.md +++ b/website/docs/faqs/Seeds/leading-zeros-in-seed.md @@ -1,5 +1,9 @@ --- title: How do I preserve leading zeros in a seed? +description: "Use column types to include leading zeros in seed" +sidebar_label: 'Include leading zeros in your seed file' +id: leading-zeros-in-seed + --- If you need to preserve leading zeros (for example in a zipcode or mobile number): diff --git a/website/docs/faqs/load-raw-data-with-seed.md b/website/docs/faqs/Seeds/load-raw-data-with-seed.md similarity index 72% rename from website/docs/faqs/load-raw-data-with-seed.md rename to website/docs/faqs/Seeds/load-raw-data-with-seed.md index 142ce37979e..e33780f5dcc 100644 --- a/website/docs/faqs/load-raw-data-with-seed.md +++ b/website/docs/faqs/Seeds/load-raw-data-with-seed.md @@ -1,9 +1,13 @@ --- title: Can I use seeds to load raw data? +description: "Use seeds to load business specific logic" +sidebar_label: 'Seed data files requirements' +id: load-raw-data-with-seed + --- Seeds should **not** be used to load raw data (for example, large CSV exports from a production database). Since seeds are version controlled, they are best suited to files that contain business-specific logic, for example a list of country codes or user IDs of employees. -Loading CSVs using dbt's seed functionality is not performant for large files. Consider using a different tool to load these CSVs into your data warehouse. +Loading CSVs using dbt's seed functionality is not performant for large files. Consider using a different tool to load these CSVs into your data warehouse. diff --git a/website/docs/faqs/seed-custom-schemas.md b/website/docs/faqs/Seeds/seed-custom-schemas.md similarity index 70% rename from website/docs/faqs/seed-custom-schemas.md rename to website/docs/faqs/Seeds/seed-custom-schemas.md index 3031600029e..66b17807904 100644 --- a/website/docs/faqs/seed-custom-schemas.md +++ b/website/docs/faqs/Seeds/seed-custom-schemas.md @@ -1,7 +1,10 @@ --- -title: Can I build my seeds in a schema other than my target schema? +title: Can I build my seeds in a schema other than my target schema or can I split my seeds across multiple schemas? +description: "Use schema config in your dbt_project.yml file" +sidebar_label: 'Build seeds in a schema outside target schema' +id: seed-custom-schemas + --- -## Or: Can I split my seeds across multiple schemas? Yes! Use the [schema](reference/resource-configs/schema.md) configuration in your `dbt_project.yml` file. diff --git a/website/docs/faqs/seed-datatypes.md b/website/docs/faqs/Seeds/seed-datatypes.md similarity index 78% rename from website/docs/faqs/seed-datatypes.md rename to website/docs/faqs/Seeds/seed-datatypes.md index 75d1cb51291..cc4f7363b3c 100644 --- a/website/docs/faqs/seed-datatypes.md +++ b/website/docs/faqs/Seeds/seed-datatypes.md @@ -1,5 +1,9 @@ --- title: How do I set a datatype for a column in my seed? +description: "Use column_types to set a datatype" +sidebar_label: 'Set a datatype for a column in seed' +id: seed-datatypes + --- dbt will infer the datatype for each column based on the data in your CSV.
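If that inference picks the wrong type, the `column_types` config named in the description above pins it explicitly. A minimal sketch in `dbt_project.yml` (the seed and column names are hypothetical):

```yml
seeds:
  jaffle_shop:
    country_codes:
      +column_types:
        country_code: varchar(2)   # keeps "US" from being inferred oddly
        country_name: varchar(64)
```

This is also the usual way to preserve leading zeros in a seed column: declare it as a character type instead of letting it be inferred as an integer.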
diff --git a/website/docs/faqs/seed-hooks.md b/website/docs/faqs/Seeds/seed-hooks.md similarity index 68% rename from website/docs/faqs/seed-hooks.md rename to website/docs/faqs/Seeds/seed-hooks.md index 77daba1a558..9af3be74e13 100644 --- a/website/docs/faqs/seed-hooks.md +++ b/website/docs/faqs/Seeds/seed-hooks.md @@ -1,8 +1,13 @@ --- title: Do hooks run with seeds? +description: "Run hooks with seeds and configure them in your dbt_project.yml file" +sidebar_label: "Use hooks to run with seeds" +id: seed-hooks + --- Yes! The following hooks are available: + - [pre-hooks & post-hooks](reference/resource-configs/pre-hook-post-hook.md) - [on-run-start & on-run-end hooks](reference/project-configs/on-run-start-on-run-end.md) diff --git a/website/docs/faqs/snapshot-hooks.md b/website/docs/faqs/Snapshots/snapshot-hooks.md similarity index 76% rename from website/docs/faqs/snapshot-hooks.md rename to website/docs/faqs/Snapshots/snapshot-hooks.md index df8b2429752..f60bd48e4c0 100644 --- a/website/docs/faqs/snapshot-hooks.md +++ b/website/docs/faqs/Snapshots/snapshot-hooks.md @@ -1,8 +1,13 @@ --- title: Do hooks run with snapshots? +description: "Run hooks with snapshots" +sidebar_label: 'Use hooks to run with snapshots' +id: snapshot-hooks + --- Yes! The following hooks are available for snapshots: + - [pre-hooks](/reference/resource-configs/pre-hook-post-hook/) - [post-hooks](/reference/resource-configs/pre-hook-post-hook/) - [on-run-start](/reference/resource-configs/pre-hook-post-hook/) diff --git a/website/docs/faqs/snapshot-schema-changes.md b/website/docs/faqs/Snapshots/snapshot-schema-changes.md similarity index 83% rename from website/docs/faqs/snapshot-schema-changes.md rename to website/docs/faqs/Snapshots/snapshot-schema-changes.md index a43a02965c9..75a5babc748 100644 --- a/website/docs/faqs/snapshot-schema-changes.md +++ b/website/docs/faqs/Snapshots/snapshot-schema-changes.md @@ -1,5 +1,9 @@ --- title: What happens if I add new columns to my snapshot query? +description: "Reconcile changes when adding new columns in snapshot query" +sidebar_label: 'Snapshot column changes' +id: snapshot-schema-changes + --- When the columns of your source query change, dbt will attempt to reconcile this change in the destination snapshot table. dbt does this by: diff --git a/website/docs/faqs/snapshot-target-schema.md b/website/docs/faqs/Snapshots/snapshot-target-schema.md similarity index 79% rename from website/docs/faqs/snapshot-target-schema.md rename to website/docs/faqs/Snapshots/snapshot-target-schema.md index 9030b50009f..70c5b56f067 100644 --- a/website/docs/faqs/snapshot-target-schema.md +++ b/website/docs/faqs/Snapshots/snapshot-target-schema.md @@ -1,5 +1,9 @@ --- title: Why is there only one `target_schema` for snapshots? +description: "Snapshots build into the same `target_schema`" +sidebar_label: 'Build snapshots into one `target_schema`' +id: snapshot-target-schema + --- Snapshots build into the same `target_schema`, no matter who is running them. @@ -14,4 +18,4 @@ Instead, in the models that `ref` your snapshots, it makes more sense to `select For this reason, there is only _one_ `target_schema`, which is _not_ environment-aware by default.
+However, this can create problems if you need to run a `snapshot` command when developing your models, or during a CI run. Fortunately, there are a few workarounds — check out [this forum article](https://discourse.getdbt.com/t/using-dynamic-schemas-for-snapshots/1070). diff --git a/website/docs/faqs/Snapshots/snapshotting-freshness-for-one-source.md b/website/docs/faqs/Snapshots/snapshotting-freshness-for-one-source.md new file mode 100644 index 00000000000..c1cc7687b11 --- /dev/null +++ b/website/docs/faqs/Snapshots/snapshotting-freshness-for-one-source.md @@ -0,0 +1,24 @@ +--- +title: How do I snapshot freshness for one source only? +description: "Use select flag to snapshot for specific sources" +sidebar_label: 'Snapshot freshness for specific sources' +id: snapshotting-freshness-for-one-source + +--- + +:::info As of dbt Core v0.21, you need to prefix sources with the source: selection method. In previous versions of dbt, sources were specified by name only. ::: + +Use the `--select` flag to snapshot freshness for specific sources. For example: + +```shell +# Snapshot freshness for all Jaffle Shop tables: +$ dbt source freshness --select source:jaffle_shop + +# Snapshot freshness for a particular source table: +$ dbt source freshness --select source:jaffle_shop.orders + +# Snapshot freshness for multiple particular source tables: +$ dbt source freshness --select source:jaffle_shop.orders source:jaffle_shop.customers +``` + +See the [`source freshness` command reference](commands/source) for more information. diff --git a/website/docs/faqs/available-tests.md b/website/docs/faqs/Tests/available-tests.md similarity index 51% rename from website/docs/faqs/available-tests.md rename to website/docs/faqs/Tests/available-tests.md index 07fb08bafe1..f08e6841bd0 100644 --- a/website/docs/faqs/available-tests.md +++ b/website/docs/faqs/Tests/available-tests.md @@ -1,12 +1,17 @@ --- title: What tests are available for me to use in dbt? +description: "Types of tests to use in dbt" +sidebar_label: 'Available tests to use in dbt' +id: available-tests + --- Out of the box, dbt ships with the following tests: + * `unique` * `not_null` * `accepted_values` * `relationships` (i.e. referential integrity) -You can also write your own [custom schema tests](building-a-dbt-project/tests). +You can also write your own [custom schema tests](/docs/build/tests). -Some additional custom schema tests have been open-sourced in the [dbt-utils package](https://github.com/dbt-labs/dbt-utils/tree/0.2.4/#schema-tests), check out the docs on [packages](package-management) to learn how to make these tests available in your project. +Some additional custom schema tests have been open-sourced in the [dbt-utils package](https://github.com/dbt-labs/dbt-utils/tree/0.2.4/#schema-tests); check out the docs on [packages](/docs/build/packages) to learn how to make these tests available in your project. diff --git a/website/docs/faqs/configurable-data-path.md b/website/docs/faqs/Tests/configurable-data-path.md similarity index 76% rename from website/docs/faqs/configurable-data-path.md rename to website/docs/faqs/Tests/configurable-data-path.md index 24f3f1800b7..bacc732433b 100644 --- a/website/docs/faqs/configurable-data-path.md +++ b/website/docs/faqs/Tests/configurable-data-path.md @@ -1,10 +1,14 @@ --- title: Can I store my seeds in a directory other than the `seeds` directory in my project?
+description: "Where to store seeds in a directory" +sidebar_label: 'How to name seeds directory' +id: configurable-data-path + --- -- **v1.0.0:** The config 'data-paths' has been deprecated in favor of [`seed-paths`](seed-paths). +- **v1.0.0:** The config 'data-paths' has been deprecated in favor of [`seed-paths`](seed-paths). diff --git a/website/docs/faqs/configurable-data-test-path.md b/website/docs/faqs/Tests/configurable-data-test-path.md similarity index 82% rename from website/docs/faqs/configurable-data-test-path.md rename to website/docs/faqs/Tests/configurable-data-test-path.md index e4bda68b810..2c3e8d729ba 100644 --- a/website/docs/faqs/configurable-data-test-path.md +++ b/website/docs/faqs/Tests/configurable-data-test-path.md @@ -1,5 +1,9 @@ --- title: Can I store my tests in a directory other than the `tests` directory in my project? +description: "Where to store tests in a directory" +sidebar_label: 'How to name tests directory' +id: configurable-data-test-path + --- By default, dbt expects your singular test files to be located in the `tests` subdirectory of your project, and generic test definitions to be located in `tests/generic` or `macros`. diff --git a/website/docs/faqs/custom-test-thresholds.md b/website/docs/faqs/Tests/custom-test-thresholds.md similarity index 79% rename from website/docs/faqs/custom-test-thresholds.md rename to website/docs/faqs/Tests/custom-test-thresholds.md index b84dc14d4cd..14dd36666c0 100644 --- a/website/docs/faqs/custom-test-thresholds.md +++ b/website/docs/faqs/Tests/custom-test-thresholds.md @@ -1,9 +1,14 @@ --- title: Can I set test failure thresholds? +description: "Use configs to set custom failure thresholds in tests" +sidebar_label: 'How to set failure thresholds in test' +id: custom-test-thresholds + --- As of `v0.20.0`, you can use the `error_if` and `warn_if` configs to set custom failure thresholds in your tests. For more details, see [reference](reference/resource-configs/severity) for more information. For dbt `v0.19.0` and earlier, you could try these possible solutions: + * Setting the [severity](resource-properties/tests#severity) to `warn`, or: * Writing a [custom generic test](custom-generic-tests) that accepts a threshold argument ([example](https://discourse.getdbt.com/t/creating-an-error-threshold-for-schema-tests/966)) diff --git a/website/docs/faqs/multiple-test-files.md b/website/docs/faqs/Tests/multiple-test-files.m similarity index 60% rename from website/docs/faqs/multiple-test-files.md rename to website/docs/faqs/Tests/multiple-test-files.m index 04e62971400..2ae6f4f93d3 100644 --- a/website/docs/faqs/multiple-test-files.md +++ b/website/docs/faqs/Tests/multiple-test-files.m @@ -1,4 +1,8 @@ --- title: Do all my tests go in one file? +description: "Structuring files for tests" +sidebar_label: 'How to structure files for tests' +id: multiple-test-files + --- No! You can use as many files as you want! Some folks find it useful to have one file per model, we tend to have one per directory. diff --git a/website/docs/faqs/recommended-tests.md b/website/docs/faqs/Tests/recommended-tests.md similarity index 61% rename from website/docs/faqs/recommended-tests.md rename to website/docs/faqs/Tests/recommended-tests.md index 8ed2e736307..d79fb173e9e 100644 --- a/website/docs/faqs/recommended-tests.md +++ b/website/docs/faqs/Tests/recommended-tests.md @@ -1,8 +1,12 @@ --- title: What tests should I add to my project? 
+description: "Recommended tests for project" +sidebar_label: 'Recommended tests for project' +id: recommended-tests + --- We recommend that every model has a test on a , that is, a column that is `unique` and `not_null`. We also recommend that you test any assumptions on your source data. For example, if you believe that your payments can only be one of three payment methods, you should test that assumption regularly — a new payment method may introduce logic errors in your SQL. -In advanced dbt projects, we recommend using [sources](using-sources) and running these source data-integrity tests against the sources rather than models. +In advanced dbt projects, we recommend using [sources](/docs/build/sources) and running these source data-integrity tests against the sources rather than models. diff --git a/website/docs/faqs/test-one-model.md b/website/docs/faqs/Tests/test-one-model.md similarity index 74% rename from website/docs/faqs/test-one-model.md rename to website/docs/faqs/Tests/test-one-model.md index 975c4cd5157..16f80e6934e 100644 --- a/website/docs/faqs/test-one-model.md +++ b/website/docs/faqs/Tests/test-one-model.md @@ -1,9 +1,14 @@ --- title: How do I test one model at a time? +description: "Use select flag to test one model at a time" +sidebar_label: 'Testing one model at a time' +id: test-one-model + --- Running tests on one model looks very similar to running a model: use the `--select` flag (or `-s` flag), followed by the name of the model: -``` + +```shell dbt test --select customers ``` diff --git a/website/docs/faqs/testing-seeds.md b/website/docs/faqs/Tests/testing-seeds.md similarity index 82% rename from website/docs/faqs/testing-seeds.md rename to website/docs/faqs/Tests/testing-seeds.md index 44b5a3cacc6..75c7fcf0ff1 100644 --- a/website/docs/faqs/testing-seeds.md +++ b/website/docs/faqs/Tests/testing-seeds.md @@ -1,12 +1,16 @@ --- title: How do I test and document seeds? +description: "Use a schema file to test and document seeds" +sidebar_label: 'Test and document seeds' +id: testing-seeds + --- The `seeds:` key is new in 0.16.0. Prior to this, use a `models:` key instead. To test and document seeds, use a [schema file](configs-and-properties) and nest the configurations under a `seeds:` key -## Example: +## Example diff --git a/website/docs/faqs/testing-sources.md b/website/docs/faqs/Tests/testing-sources.md similarity index 71% rename from website/docs/faqs/testing-sources.md rename to website/docs/faqs/Tests/testing-sources.md index 2f2603856e4..06051eae7e9 100644 --- a/website/docs/faqs/testing-sources.md +++ b/website/docs/faqs/Tests/testing-sources.md @@ -1,23 +1,28 @@ --- title: How do I run tests on just my sources? +description: "Use the select source command to test sources" +sidebar_label: 'Run tests on all sources' +id: testing-sources + --- To run tests on all sources, use the following command: -``` +```shell $ dbt test --select source:* ``` + (You can also use the `-s` shorthand here instead of `--select`) To run tests on one source (and all of its tables): -``` +```shell $ dbt test --select source:jaffle_shop ``` And, to run tests on one source only: -``` +```shell $ dbt test --select source:jaffle_shop.orders ``` @@ -25,8 +30,8 @@ Yep, we know this syntax is a little less than ideal, so we're hoping to improve :::info Node selection syntax -In dbt 0.21.0, the node selection syntax was standardised (https://github.com/dbt-labs/dbt-core/pull/3791) to use `--select` everywhere. 
-
+In dbt 0.21.0, the node selection syntax [was standardized](https://github.com/dbt-labs/dbt-core/pull/3791) to use `--select` everywhere. Before this, some commands like `dbt run` and `dbt test` used `--models` instead.
+
Older versions still show an error because [argparse](https://docs.python.org/3/library/argparse.html#allow-abbrev) is expanding `--select` to `--selector`, which is a different flag. To fix this issue, either upgrade to dbt 0.21.0 or higher, or use `--models` instead of `--select`.

:::
diff --git a/website/docs/faqs/uniqueness-two-columns.md b/website/docs/faqs/Tests/uniqueness-two-columns.md
similarity index 86%
rename from website/docs/faqs/uniqueness-two-columns.md
rename to website/docs/faqs/Tests/uniqueness-two-columns.md
index 9df2366dd9a..27a49f352fa 100644
--- a/website/docs/faqs/uniqueness-two-columns.md
+++ b/website/docs/faqs/Tests/uniqueness-two-columns.md
@@ -1,5 +1,9 @@
---
title: Can I test the uniqueness of two columns?
+description: "Options to test uniqueness of two columns"
+sidebar_label: 'Test the uniqueness of two columns'
+id: uniqueness-two-columns
+
---

Yes, there are a few different options.
@@ -69,7 +73,7 @@ models:

#### 3. Use the `dbt_utils.unique_combination_of_columns` test

-This is especially useful for large datasets since it is more performant. Check out the docs on [packages](package-management) for more information.
+This is especially useful for large datasets since it is more performant. Check out the docs on [packages](/docs/build/packages) for more information.
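+
+As a rough sketch, option 3 looks something like this — the model and column names here are illustrative:
+
+```yml
+version: 2
+
+models:
+  - name: orders
+    tests:
+      - dbt_utils.unique_combination_of_columns:
+          combination_of_columns:
+            - order_id
+            - customer_id
+```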
diff --git a/website/docs/faqs/when-to-test.md b/website/docs/faqs/Tests/when-to-test.md
similarity index 72%
rename from website/docs/faqs/when-to-test.md
rename to website/docs/faqs/Tests/when-to-test.md
index 63585a333e8..fc97f98000b 100644
--- a/website/docs/faqs/when-to-test.md
+++ b/website/docs/faqs/Tests/when-to-test.md
@@ -1,5 +1,9 @@
---
title: When should I run my tests?
+description: "Run tests in production or when writing new code"
+sidebar_label: 'When to run tests'
+id: when-to-test
+
---

You should run your tests whenever you are writing new code (to ensure you haven't broken any existing models by changing SQL), and whenever you run your transformations in production (to ensure that your assumptions about your source data are still valid).
diff --git a/website/docs/faqs/Troubleshooting/access-gdrive-credential.md b/website/docs/faqs/Troubleshooting/access-gdrive-credential.md
new file mode 100644
index 00000000000..ca73c5c2631
--- /dev/null
+++ b/website/docs/faqs/Troubleshooting/access-gdrive-credential.md
@@ -0,0 +1,24 @@
+---
+title: I'm receiving a "Permission denied while getting Drive credential" error when trying to query from Google Drive?
+description: "Grant BigQuery service account access"
+sidebar_label: 'Error when trying to query from Google Drive'
+id: access-gdrive-credential
+
+---
+
+If you're seeing the below error when you try to query a dataset from a Google Drive document in the IDE, we'll do our best to get you unstuck with the below steps!
+
+```
+Access denied: BigQuery BigQuery: Permission denied while getting Drive credentials
+```
+
+Usually this error indicates that you haven't granted the BigQuery service account access to the specific Google Drive document.
+If you're seeing this error, try giving the service account you are using for your BigQuery connection in dbt Cloud (the client email seen [here](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database#connecting-to-bigquery)) permission to your Google Drive or Google Sheet. You'll want to do this directly in the Google document: click the 'Share' button and enter the client email there.
+
+If you are experiencing this error when using OAuth, and you have verified your access to the Google Sheet, you may need to grant permissions for gcloud to access Google Drive:
+
+```
+gcloud auth application-default login --scopes=openid,https://www.googleapis.com/auth/userinfo.email,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/sqlservice.login,https://www.googleapis.com/auth/drive
+```
+For more info, see the [gcloud auth application-default documentation](https://cloud.google.com/sdk/gcloud/reference/auth/application-default/login).
+
+If you've tried the steps above and are still experiencing this behavior, reach out to the Support team at support@getdbt.com and we'll be happy to help!
diff --git a/website/docs/faqs/Troubleshooting/access_token_error.md b/website/docs/faqs/Troubleshooting/access_token_error.md
new file mode 100644
index 00000000000..85be1fcd9dc
--- /dev/null
+++ b/website/docs/faqs/Troubleshooting/access_token_error.md
@@ -0,0 +1,21 @@
+---
+title: I'm receiving an `access_token` error when trying to run queries in the IDE.
+description: "Reauthenticate warehouse when seeing `access_token` error"
+sidebar_label: '`access_token` error in the IDE'
+id: access_token_error
+
+---
+
+If you're seeing the below error when you try to run queries in the IDE, have no fear - we'll do our best to get you unstuck with the following steps!
+
+ `access_token`
+
+In order to resolve the issue, you'll want to:
+
+1. Go to your Profile (upper right corner of dbt Cloud)
+2. Click on the correct warehouse connection under "Credentials"
+3. Click the green "Connect Snowflake Account" button in the "Development Credentials" section. This takes you through reauthentication via the SSO flow.
+
+If you've tried the steps above and are still experiencing this behavior, reach out to the Support team at support@getdbt.com and we'll be happy to help!
+
+
diff --git a/website/docs/faqs/dispatch-could-not-find-package.md b/website/docs/faqs/Troubleshooting/dispatch-could-not-find-package.md
similarity index 60%
rename from website/docs/faqs/dispatch-could-not-find-package.md
rename to website/docs/faqs/Troubleshooting/dispatch-could-not-find-package.md
index dea47e27f98..8b9f6c15367 100644
--- a/website/docs/faqs/dispatch-could-not-find-package.md
+++ b/website/docs/faqs/Troubleshooting/dispatch-could-not-find-package.md
@@ -1,10 +1,18 @@
---
-title: "[Error] Could not find package 'my_project'"
+title: "[Error] Could not find my_project package"
+description: "Macros missing from packages"
+sidebar_label: 'Could not find package error'
+id: dispatch-could-not-find-package
+
---

If a package name is included in the `search_order` of a project-level `dispatch` config, dbt expects that package to contain macros which are viable candidates for dispatching.
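+
+For reference, such a project-level `dispatch` config in `dbt_project.yml` looks something like this (the package names here are illustrative):
+
+```yml
+dispatch:
+  - macro_namespace: dbt_utils
+    search_order: ['my_project', 'dbt_utils']
+```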
If an included package does not contain _any_ macros, dbt will raise an error like:
-```
+
+```shell
Compilation Error
  In dispatch: Could not find package 'my_project'
```
+
This does not mean the package or root project is missing—it means that any macros from it are missing, and so it is missing from the search spaces available to `dispatch`.
+
+If you've checked the above and are still experiencing this behavior, reach out to the Support team at support@getdbt.com and we'll be happy to help!
diff --git a/website/docs/faqs/Troubleshooting/git-revlist-error.md b/website/docs/faqs/Troubleshooting/git-revlist-error.md
new file mode 100644
index 00000000000..e90c2729ec5
--- /dev/null
+++ b/website/docs/faqs/Troubleshooting/git-revlist-error.md
@@ -0,0 +1,31 @@
+---
+title: I'm receiving a git rev-list master error in the IDE?
+description: "Primary branch not recognized"
+sidebar_label: 'git rev-list master error in the IDE'
+id: git-revlist-error
+---
+
+If you're unable to access the IDE due to the below error message, we'll do our best to get you unstuck with the below steps!
+
+```shell
+git rev-list master..origin/main --count
+fatal: ambiguous argument 'master..origin/main': unknown revision or path not in the working tree.
+Use '--' to separate paths from revisions, like this:
+'git [...] -- [...]'
+```
+
+Usually this error indicates that the "main" branch name has changed, or that dbt Cloud was unable to determine what your primary branch is. No worries, we have a few workarounds for you to try:
+
+**Workaround 1**
+Take a look at your Environment Settings. If you **do not** have a custom branch filled in your Environment Settings:
+
+1. Disconnect and reconnect your repository [connection](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url) on your Project Settings page. This should allow dbt Cloud to pick up that the "main" branch is now called `main`.
+2. In the Environment Settings, set the custom branch to 'master' and refresh the IDE.
+
+**Workaround 2**
+Take a look at your Environment Settings. If you **do** have a custom branch filled in your Environment Settings:
+
+1. Disconnect and reconnect your repository [connection](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url) on your Project Settings page. This should allow dbt Cloud to pick up that the "main" branch is now called `main`.
+2. In the Environment Settings, remove the custom branch and refresh the IDE.
+
+If you've tried the workarounds above and are still experiencing this behavior, reach out to the Support team at support@getdbt.com and we'll be happy to help!
diff --git a/website/docs/faqs/Troubleshooting/gitignore.md b/website/docs/faqs/Troubleshooting/gitignore.md
new file mode 100644
index 00000000000..47c7500e662
--- /dev/null
+++ b/website/docs/faqs/Troubleshooting/gitignore.md
@@ -0,0 +1,26 @@
+---
+title: Why can't I checkout a branch or create a new branch?
+description: "Add or fill in gitignore file"
+sidebar_label: 'Unable to checkout or create branch'
+id: gitignore
+---
+
+If you're finding yourself unable to revert changes, check out a branch, or click commit, this is usually due to your project missing a [.gitignore](https://github.com/dbt-labs/dbt-starter-project/blob/main/.gitignore) file, or to your .gitignore file missing the necessary contents.
+
+This is what causes that 'commit' git action button to display.
+No worries though - to fix this, you'll need to complete the following steps in order:
+
+1. In the Cloud IDE, add the missing .gitignore file or contents to your project. You'll want to make sure the .gitignore file includes the following:
+
+    ```shell
+    target/
+    dbt_modules/
+    dbt_packages/
+    logs/
+    ```
+
+2. Once you've added that, make sure to save and commit.
+
+3. Navigate to the same branch in your remote repository (which can be accessed directly through your git provider's web interface) and delete the logs, target, and dbt_modules/dbt_packages folders.
+
+4. Go back into the Cloud IDE and reclone your repository. This can be done by clicking on the green "ready" in the bottom right corner of the IDE (next to the command bar), and then clicking the orange "reclone repo" button in the pop up.
+
diff --git a/website/docs/faqs/Troubleshooting/gitlab-authentication.md b/website/docs/faqs/Troubleshooting/gitlab-authentication.md
new file mode 100644
index 00000000000..7e7177b022a
--- /dev/null
+++ b/website/docs/faqs/Troubleshooting/gitlab-authentication.md
@@ -0,0 +1,29 @@
+---
+title: I'm seeing a GitLab authentication out of date error loop
+description: "GitLab and dbt Cloud deploy key mismatch"
+sidebar_label: 'GitLab authentication out of date'
+id: gitlab-authentication
+---
+
+If you're seeing a 'GitLab Authentication is out of date' 500 server error page, this usually occurs when the deploy keys in the repository settings in dbt Cloud and GitLab do not match.
+
+No worries - this is a current issue the dbt Labs team is working on and we have a few workarounds for you to try:
+
+### 1st Workaround
+
+1. Disconnect the repo from the project in dbt Cloud.
+2. Go to GitLab and click on Settings > Repository.
+3. Under Repository Settings, remove/revoke active dbt Cloud deploy tokens and deploy keys.
+4. Attempt to reconnect your repository via dbt Cloud.
+5. Check GitLab to make sure that the new deploy key is added.
+6. Once you've confirmed that it's added, refresh dbt Cloud and try developing once again.
+
+### 2nd Workaround
+
+1. Keep the repo in the project as is -- don't disconnect.
+2. Copy the deploy key generated in dbt Cloud.
+3. Go to GitLab and click on Settings > Repository.
+4. Under Repository Settings, manually add the dbt Cloud deploy key to your GitLab repository (with the `Grant write permissions` box checked).
+5. Go back to dbt Cloud, refresh your page, and try developing again.
+
+If you've tried the workarounds above and are still experiencing this behavior, reach out to the Support team at support@getdbt.com and we'll be happy to help!
diff --git a/website/docs/faqs/Troubleshooting/nonetype-ide-error.md b/website/docs/faqs/Troubleshooting/nonetype-ide-error.md
new file mode 100644
index 00000000000..85e0c67ec6a
--- /dev/null
+++ b/website/docs/faqs/Troubleshooting/nonetype-ide-error.md
@@ -0,0 +1,22 @@
+---
+title: I'm receiving a NoneType object has no attribute error in the IDE?
+description: "Copy SSH key to your warehouse"
+sidebar_label: 'NoneType error in the IDE'
+id: nonetype-ide-error
+
+---
+
+If you're unable to access the IDE due to the below error message, we'll do our best to get you unstuck with the below steps!
+
+```shell
+'NoneType' object has no attribute
+'enumerate_fields'
+```
+
+Usually this error indicates that you tried connecting your database via [SSH tunnel](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database#connecting-via-an-ssh-tunnel).
+If you're seeing this error, double-check that you have supplied the following items:
+
+- the hostname
+- the username
+- the port of the bastion server
+
+If you've tried the steps above and are still experiencing this behavior, reach out to the Support team at support@getdbt.com and we'll be happy to help!
diff --git a/website/docs/faqs/Troubleshooting/partial-parsing-error.md b/website/docs/faqs/Troubleshooting/partial-parsing-error.md
new file mode 100644
index 00000000000..3f37928771f
--- /dev/null
+++ b/website/docs/faqs/Troubleshooting/partial-parsing-error.md
@@ -0,0 +1,11 @@
+---
+title: "I'm getting a \"Partial parsing enabled: 1 files deleted, 0 files added, 2 files changed\" compilation error in dbt Cloud?"
+description: "Delete partial_parse file"
+sidebar_label: 'partial_parse error in the IDE'
+id: partial-parsing-error
+
+---
+
+If you're receiving this error, try deleting the `target/partial_parse.msgpack` file from your project, then refresh your IDE.
+
+If you've tried the workaround above and are still experiencing this behavior, reach out to the Support team at support@getdbt.com and we'll be happy to help!
diff --git a/website/docs/faqs/Troubleshooting/runtime-error-could-not-find-profile.md b/website/docs/faqs/Troubleshooting/runtime-error-could-not-find-profile.md
new file mode 100644
index 00000000000..7d7ad21e4f6
--- /dev/null
+++ b/website/docs/faqs/Troubleshooting/runtime-error-could-not-find-profile.md
@@ -0,0 +1,28 @@
+---
+title: I'm receiving a Runtime Error Could not find profile named 'user' error?
+description: "Re-authorize your credentials on Profile Settings"
+sidebar_label: '"Could not find profile named user" error in the IDE'
+id: runtime-error-could-not-find-profile
+
+---
+
+If you're unable to access the IDE due to the below error message, we'll do our best to get you unstuck with the below steps!
+
+```shell
+Running with dbt=0.21.0
+Encountered an error while reading the project:
+  ERROR: Runtime Error
+  Could not find profile named 'user'
+Runtime Error
+  Could not run dbt'
+```
+
+Usually this error indicates an issue with missing or stale credentials/authentication. No worries, we have a few workarounds for you to try:
+
+**In the IDE:**
+If this is happening in the IDE, you'll want to navigate to the Profile settings where your development credentials are configured. Once you're there, you'll need to either re-enter or re-authorize your credentials in order to get around this error message.
+
+**In a job:**
+If this is happening in a job, it might be that you made a change to the deployment environment in which the job is configured and did not re-enter your deployment credentials upon saving those changes. To fix this, you'll need to go back into the deployment environment settings, re-enter your credentials (either the private key/private key passphrase or the username and password), and kick off a new job run.
+
+If you've tried the steps above and are still experiencing this behavior, reach out to the Support team at support@getdbt.com and we'll be happy to help!
diff --git a/website/docs/faqs/Troubleshooting/runtime-packages.yml.md b/website/docs/faqs/Troubleshooting/runtime-packages.yml.md
new file mode 100644
index 00000000000..14e87be41d9
--- /dev/null
+++ b/website/docs/faqs/Troubleshooting/runtime-packages.yml.md
@@ -0,0 +1,28 @@
+---
+title: Why am I receiving a Runtime Error in my packages?
+description: "Update dbt_utils package in packages.yml file" +sidebar_label: 'Runtime error in packages.yml file' +id: runtime-packages.yml + +--- + +If you're receiving the runtime error below in your packages.yml folder, it may be due to an old version of your dbt_utils package that isn't compatible with your current dbt Cloud version. + +```shell +Running with dbt=xxx +Runtime Error + Failed to read package: Runtime Error + Invalid config version: 1, expected 2 + Error encountered in dbt_utils/dbt_project.yml + ``` + +Try updating the old version of the dbt_utils package in your packages.yml to the latest version found in the [dbt hub](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/): + +```shell +packages: +- package: dbt-labs/dbt_utils + +version: xxx +``` + +If you've tried the workaround above and are still experiencing this behavior - reach out to the Support team at support@getdbt.com and we'll be happy to help! diff --git a/website/docs/faqs/sql-errors.md b/website/docs/faqs/Troubleshooting/sql-errors.md similarity index 69% rename from website/docs/faqs/sql-errors.md rename to website/docs/faqs/Troubleshooting/sql-errors.md index 3e6b84b5842..b5101d3251a 100644 --- a/website/docs/faqs/sql-errors.md +++ b/website/docs/faqs/Troubleshooting/sql-errors.md @@ -1,11 +1,15 @@ --- -title: What happens if the SQL in my query is bad? +title: What happens if the SQL in my query is bad or I get a database error? +description: "Use error message and logs to debug" +sidebar_label: 'How to debug SQL or database error' +id: sql-errors + --- -Or: -### I got a Database Error, what does that mean? + If there's a mistake in your SQL, dbt will return the error that your database returns. -```shell-session + +```shell $ dbt run --select customers Running with dbt=0.15.0 Found 3 models, 9 tests, 0 snapshots, 0 analyses, 133 macros, 0 operations, 0 seed files, 0 sources @@ -20,10 +24,10 @@ Found 3 models, 9 tests, 0 snapshots, 0 analyses, 133 macros, 0 operations, 0 se Completed with 1 error and 0 warnings: Database Error in model customers (models/customers.sql) - Syntax error: Expected ")" but got identifier `grand-highway-265418` at [13:15] + Syntax error: Expected ")" but got identifier `your-info-12345` at [13:15] compiled SQL at target/run/jaffle_shop/customers.sql Done. PASS=0 WARN=0 ERROR=1 SKIP=0 TOTAL=1 ``` -Any models downstream of this model will also be skipped. Use the error message and the [compiled SQL](faqs/checking-logs.md) to debug any errors. +Any models downstream of this model will also be skipped. Use the error message and the [compiled SQL](/faqs/Runs/checking-logs) to debug any errors. diff --git a/website/docs/faqs/Troubleshooting/unused-model-configurations.md b/website/docs/faqs/Troubleshooting/unused-model-configurations.md new file mode 100644 index 00000000000..ba5506c260f --- /dev/null +++ b/website/docs/faqs/Troubleshooting/unused-model-configurations.md @@ -0,0 +1,10 @@ +--- +title: I got an "unused model configurations" error message, what does this mean? +description: "Model configurations error message" +sidebar_label: 'unused model configurations error' +id: unused-model-configurations + +--- +You might have forgotten to nest your configurations under your project name, or you might be trying to apply configurations to a directory that doesn't exist. + +Check out this [article](https://discourse.getdbt.com/t/faq-i-got-an-unused-model-configurations-error-message-what-does-this-mean/112) to understand more. 
diff --git a/website/docs/faqs/bq-impersonate-service-account-setup.md b/website/docs/faqs/Warehouse/bq-impersonate-service-account-setup.md
similarity index 84%
rename from website/docs/faqs/bq-impersonate-service-account-setup.md
rename to website/docs/faqs/Warehouse/bq-impersonate-service-account-setup.md
index d57e23b32b5..60db41095e5 100644
--- a/website/docs/faqs/bq-impersonate-service-account-setup.md
+++ b/website/docs/faqs/Warehouse/bq-impersonate-service-account-setup.md
@@ -1,5 +1,9 @@
---
title: How can I set up the right permissions in BigQuery?
+description: "Use service account to set up permissions in BigQuery"
+sidebar_label: 'Setting up permissions in BigQuery'
+id: bq-impersonate-service-account-setup
+
---

To use this functionality, first create the service account you want to
@@ -16,4 +20,4 @@ the [IAM Service Account Credentials API](https://console.cloud.google.com/apis/
Enabling the API and granting the role are eventually consistent operations,
taking up to 7 minutes to fully complete, but usually fully propagating within
60 seconds. Give it a few minutes, then add the `impersonate_service_account`
-option to your BigQuery profile configuration.
\ No newline at end of file
+option to your BigQuery profile configuration.
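+
+The profile entry might then look roughly like this — the project, dataset, and service account values here are placeholders:
+
+```yml
+my-bigquery-profile:
+  target: dev
+  outputs:
+    dev:
+      type: bigquery
+      method: oauth
+      project: my-gcp-project
+      dataset: dbt_dev
+      threads: 4
+      impersonate_service_account: dbt-runner@my-gcp-project.iam.gserviceaccount.com
+```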
+ * dbt projects connecting to warehouses like Snowflake or Bigquery—these empower one set of credentials to draw from all datasets or 'projects' available to an account—are _sometimes_ said to connect to more than one database. * dbt projects connecting to warehouses like Redshift and Postgres—these tie one set of credentials to one database—are said to connect to one database only. @@ -11,4 +16,3 @@ Sidestep the 'one database problem' by relying on thinking (i. Hence, instead of thinking "how do I connect my dbt project to two databases", ask "what loader services will best prepare our warehouse for dbt transformations." For more on the modern 'ELT-powered' data stack, see the "dbt and the modern BI stack" section of this [dbt blog post](https://blog.getdbt.com/what--exactly--is-dbt-/). - diff --git a/website/docs/faqs/database-privileges.md b/website/docs/faqs/Warehouse/database-privileges.md similarity index 87% rename from website/docs/faqs/database-privileges.md rename to website/docs/faqs/Warehouse/database-privileges.md index be5ea2ec0c0..73e0549f130 100644 --- a/website/docs/faqs/database-privileges.md +++ b/website/docs/faqs/Warehouse/database-privileges.md @@ -1,5 +1,9 @@ --- title: What privileges does my database user need to use dbt? +description: "Database privileges to use dbt" +sidebar_label: 'Database privileges to use dbt' +id: database-privileges + --- Your user will need to be able to: * `select` from raw data in your warehouse (i.e. data to be transformed) diff --git a/website/docs/faqs/Warehouse/loading-data.md b/website/docs/faqs/Warehouse/loading-data.md new file mode 100644 index 00000000000..3fb13f139b0 --- /dev/null +++ b/website/docs/faqs/Warehouse/loading-data.md @@ -0,0 +1,12 @@ +--- +title: How do I load data into my warehouse? +description: "Recommendations on tools to load data to warehouse" +sidebar_label: 'Recommendations on tools to get data into your warehouse' +id: loading-data + +--- +dbt assumes that you already have a copy of your data, in your . We recommend you use an off-the-shelf tool like [Stitch](https://www.stitchdata.com/) or [Fivetran](https://fivetran.com/) to get data into your warehouse. + +**Can dbt be used to load data?** + +No, dbt does not extract or load data. It focuses on the transformation step only. diff --git a/website/docs/faqs/Warehouse/sample-profiles.md b/website/docs/faqs/Warehouse/sample-profiles.md new file mode 100644 index 00000000000..57287a7d97b --- /dev/null +++ b/website/docs/faqs/Warehouse/sample-profiles.md @@ -0,0 +1,9 @@ +--- +title: What should my profiles.yml file look like for my warehouse? +description: "Go to Profile Set up to see the structure of a profile" +sidebar_label: 'Structure of profile for each warehouse' +id: sample-profiles + +--- + +The structure of a profile looks different on each warehouse. Check out the [Supported Data Platforms](supported-data-platforms) page, and navigate to the `Profile Setup` section for your warehouse. diff --git a/website/docs/faqs/checking-logs.md b/website/docs/faqs/checking-logs.md deleted file mode 100644 index edd84b9a2cb..00000000000 --- a/website/docs/faqs/checking-logs.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -title: How can I see the SQL that dbt is running? 
---- -To check out the SQL that dbt is running, you can look in: -* dbt Cloud: - * Within the run output, click on a model name, and then select "Details" -* dbt CLI: - * The `target/compiled/` directory for compiled `select` statements - * The `target/run/` directory for compiled `create` statements - * The `logs/dbt.log` file for verbose logging. diff --git a/website/docs/faqs/cloud-vs-core.md b/website/docs/faqs/cloud-vs-core.md deleted file mode 100644 index 556264ae8f7..00000000000 --- a/website/docs/faqs/cloud-vs-core.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: What is the difference between dbt Core, the dbt CLI and dbt Cloud? ---- - -dbt Core is the software that takes a [dbt project](projects) (`.sql` and `.yml` files) and a command and then creates tables/views in your warehouse. dbt Core includes a command line interface (CLI) so that users can execute dbt commands using a terminal program. dbt Core is [open source](https://github.com/dbt-labs/dbt) and free to use. - -dbt Cloud is an application that helps teams use dbt. dbt Cloud provides a web-based IDE to develop dbt projects, a purpose-built scheduler, and a way to share dbt documentation with your team. dbt Cloud offers a number of features for free, as well as additional features in paid tiers (check out the pricing [here](https://www.getdbt.com/pricing/)). diff --git a/website/docs/faqs/create-a-schema.md b/website/docs/faqs/create-a-schema.md deleted file mode 100644 index 0ee9411cd87..00000000000 --- a/website/docs/faqs/create-a-schema.md +++ /dev/null @@ -1,4 +0,0 @@ ---- -title: Do I need to create my target schema before running dbt? ---- -Nope! dbt will check if the schema exists when it runs. If the schema does not exist, dbt will create it for you. \ No newline at end of file diff --git a/website/docs/faqs/dbt-source-freshness.md b/website/docs/faqs/dbt-source-freshness.md deleted file mode 100644 index 6eebf2a832b..00000000000 --- a/website/docs/faqs/dbt-source-freshness.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -title: Are the results of freshness stored anywhere? ---- -Yes! - -The `dbt source freshness` command will output a pass/warning/error status for each selected in the freshness snapshot. - -Additionally, dbt will write the freshness results to a file in the `target/` directory called `sources.json` by default. You can also override this destination, use the `-o` flag to the `dbt source freshness` command. - -Lastly, after enabling source freshness within a job, configure [Artifacts](docs/dbt-cloud/using-dbt-cloud/artifacts) in your account settings to enable 'Data Sources' to appear as a new dropdown in the hamburger menu, where you can see current status for source freshness. diff --git a/website/docs/faqs/dbt-specific-jinja.md b/website/docs/faqs/dbt-specific-jinja.md deleted file mode 100644 index 828f900e1cc..00000000000 --- a/website/docs/faqs/dbt-specific-jinja.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -title: What parts of Jinja are dbt-specific? ---- - -There are certain expressions that are specific to dbt — these are documented in the [Jinja function reference](dbt-jinja-functions) section of these docs. Further, docs blocks, snapshots, and materializations are custom Jinja _blocks_ that exist only in dbt. 
diff --git a/website/docs/faqs/docs-for-multiple-projects.md b/website/docs/faqs/docs-for-multiple-projects.md deleted file mode 100644 index 0966ec8d480..00000000000 --- a/website/docs/faqs/docs-for-multiple-projects.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: Can I render docs for multiple projects? ---- - -Yes! To do this, you'll need to create a "super project" that lists each project as a dependent [package](package-management) in a `packages.yml` file. Then run `dbt deps` to install the projects as packages, prior to running `dbt docs generate`. - -If you are going down the route of multiple projects, be sure to check out our advice [1](https://discourse.getdbt.com/t/should-i-have-an-organisation-wide-project-a-monorepo-or-should-each-work-flow-have-their-own/666) [2](https://discourse.getdbt.com/t/how-to-configure-your-dbt-repository-one-or-many/2121)on the topic. diff --git a/website/docs/faqs/failed-tests.md b/website/docs/faqs/failed-tests.md deleted file mode 100644 index 3b4a1751648..00000000000 --- a/website/docs/faqs/failed-tests.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -title: One of my tests failed, how can I debug it? ---- -To debug a failing test, find the SQL that dbt ran by: -* dbt Cloud: - * Within the test output, click on the failed test, and then select "Details" -* dbt CLI: - * Open the file path returned as part of the error message. - * Navigate to the `target/compiled/schema_tests` directory for all compiled test queries - -Copy the SQL into a query editor (in dbt Cloud, you can paste it into a new `Statement`), and run the query to find the records that failed. - diff --git a/website/docs/faqs/index.md b/website/docs/faqs/index.md deleted file mode 100644 index 0a5777f1e53..00000000000 --- a/website/docs/faqs/index.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -slug: all -id: all -title: List of FAQs ---- - - diff --git a/website/docs/faqs/loading-data.md b/website/docs/faqs/loading-data.md deleted file mode 100644 index 28d53210cf5..00000000000 --- a/website/docs/faqs/loading-data.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: How do I load data into my warehouse? ---- -dbt assumes that you already have a copy of your data, in your data warehouse. We recommend you use an off-the-shelf tool like [Stitch](https://www.stitchdata.com/) or [Fivetran](https://fivetran.com/) to get data into your warehouse. - -**Can dbt be used to load data?** - -No, dbt does not extract or load data. It focuses on the transformation step only. diff --git a/website/docs/faqs/long-descriptions.md b/website/docs/faqs/long-descriptions.md deleted file mode 100644 index c59f1b6b726..00000000000 --- a/website/docs/faqs/long-descriptions.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -title: How do I write long-form explanations in my descriptions? ---- -If you need more than a sentence to explain a model, you can: -1. Split your description over multiple lines ([yaml docs](https://yaml-multiline.info/)), like so: -```yml -version: 2 - -models: -- name: customers - description: > - Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod - tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, - quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo - consequat. -``` - -2. Use a [docs block](/documentation#using-docs-blocks) to write the description in a Markdown file. 
diff --git a/website/docs/faqs/prerelease-docs.md b/website/docs/faqs/prerelease-docs.md deleted file mode 100644 index 2cc029bb3da..00000000000 --- a/website/docs/faqs/prerelease-docs.md +++ /dev/null @@ -1,4 +0,0 @@ ---- -title: Why are there "prerelease" docs? ---- -We want to give beta testers the information they need to try out new features, without adding confusion to the current docs site. This is served from a long-lived `next` branch of the docs repo. Feedback on prerelease docs is also welcome—use the "Edit this page" feature at the bottom. diff --git a/website/docs/faqs/profile-name.md b/website/docs/faqs/profile-name.md deleted file mode 100644 index b72260baa11..00000000000 --- a/website/docs/faqs/profile-name.md +++ /dev/null @@ -1,4 +0,0 @@ ---- -title: What should I name my profile? ---- -We typically use a company name for a profile name, and then use targets to differentiate between `dev` and `prod`. Check out the docs on [managing environments](managing-environments) for more information. diff --git a/website/docs/faqs/project-name.md b/website/docs/faqs/project-name.md deleted file mode 100644 index fa893d882db..00000000000 --- a/website/docs/faqs/project-name.md +++ /dev/null @@ -1,4 +0,0 @@ ---- -title: What should I name my dbt project? ---- -The name of your company (in `snake_case`) often makes for a good [project name](reference/project-configs/name.md). diff --git a/website/docs/faqs/reference-models-in-another-project.md b/website/docs/faqs/reference-models-in-another-project.md deleted file mode 100644 index 02c1e21aa24..00000000000 --- a/website/docs/faqs/reference-models-in-another-project.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: How can I reference models or macros in another project? ---- - -You can use [packages](package-management) to add another project to your dbt -project, including other projects you've created. Check out the [docs](package-management) -for more information! diff --git a/website/docs/faqs/sample-profiles.md b/website/docs/faqs/sample-profiles.md deleted file mode 100644 index 8c058512291..00000000000 --- a/website/docs/faqs/sample-profiles.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -title: What should my profiles.yml file look like for my warehouse? ---- - -The structure of a profile looks different on each warehouse. Check out the [supported databases](available-adapters) page, and navigate to the `Profile Setup` section for your warehouse. diff --git a/website/docs/faqs/sharing-documentation.md b/website/docs/faqs/sharing-documentation.md deleted file mode 100644 index dafe8c95f76..00000000000 --- a/website/docs/faqs/sharing-documentation.md +++ /dev/null @@ -1,4 +0,0 @@ ---- -title: How do I share my documentation with my team members? ---- -If you're using dbt Cloud to deploy your project, and have the [Team Plan](https://www.getdbt.com/pricing/), you can have up to 50 read only users, who will be able access the documentation for your project. diff --git a/website/docs/faqs/snapshotting-freshness-for-one-source.md b/website/docs/faqs/snapshotting-freshness-for-one-source.md deleted file mode 100644 index 05bce561184..00000000000 --- a/website/docs/faqs/snapshotting-freshness-for-one-source.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -title: How do I snapshot freshness for one source only? ---- - - -Use the `--select` flag to snapshot freshness for specific sources. 
Eg:
-
-```
-# Snapshot freshness for all Snowplow tables:
-$ dbt source freshness --select jaffle_shop
-
-# Snapshot freshness for a particular source :
-$ dbt source freshness --select jaffle_shop.orders
-
-# Snapshot freshness for multiple particular source tables:
-$ dbt source freshness --select jaffle_shop.orders jaffle_shop.customers
-```
-
-See the [`source freshness` command reference](commands/source) for more information.
diff --git a/website/docs/faqs/target-names.md b/website/docs/faqs/target-names.md
deleted file mode 100644
index f01b0e4c399..00000000000
--- a/website/docs/faqs/target-names.md
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: What should I name my target?
----
-
-We typically use targets to differentiate between development and production runs of dbt, naming the targets `dev` and `prod` respectively. Check out the docs on [managing environments](managing-environments) for more information.
diff --git a/website/docs/faqs/test-sources.md b/website/docs/faqs/test-sources.md
deleted file mode 100644
index bf37fbf05d4..00000000000
--- a/website/docs/faqs/test-sources.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-title: How do I run tests on sources only?
----
-
-It is possible! You need to use the `source:` selection method:
-
-```
-$ dbt test --select source:*
-```
-
-Check out the [model selection syntax documentation](node-selection/test-selection-examples) for more operators and examples.
diff --git a/website/docs/faqs/unused-model-configurations.md b/website/docs/faqs/unused-model-configurations.md
deleted file mode 100644
index d4d968bada1..00000000000
--- a/website/docs/faqs/unused-model-configurations.md
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: I got an "unused model configurations" error message, what does this mean?
----
-You might have forgotten to nest your configurations under your project name, or you might be trying to apply configurations to a directory that doesn't exist. Check out this [article](https://discourse.getdbt.com/t/faq-i-got-an-unused-model-configurations-error-message-what-does-this-mean/112) to understand more.
diff --git a/website/docs/faqs/yaml-file-extension.md b/website/docs/faqs/yaml-file-extension.md
deleted file mode 100644
index 1fc6a6f7dd0..00000000000
--- a/website/docs/faqs/yaml-file-extension.md
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Can I use a yaml file extension?
----
-
-No. At present, dbt will only search for files with a `.yml` file extension. In a future release of dbt, dbt will also search for files with a `.yaml` file extension.
diff --git a/website/docs/guides/advanced/adapter-development/1-what-are-adapters.md b/website/docs/guides/advanced/adapter-development/1-what-are-adapters.md
new file mode 100644
index 00000000000..08769a9bc54
--- /dev/null
+++ b/website/docs/guides/advanced/adapter-development/1-what-are-adapters.md
@@ -0,0 +1,100 @@
+---
+title: "What are adapters? Why do we need them?"
+id: "1-what-are-adapters"
+---
+
+Adapters are an essential component of dbt. At their most basic level, they are how dbt Core connects with the various supported data platforms. At a higher level, dbt Core adapters strive to give analytics engineers more transferable skills as well as standardize how analytics projects are structured. Gone are the days when you have to learn a new language or flavor of SQL when you move to a new job that has a different data platform. That is the power of adapters in dbt Core.
+
+ Navigating and developing around the nuances of different databases can be daunting, but you are not alone.
+ Visit the [#adapter-ecosystem](https://getdbt.slack.com/archives/C030A0UF5LM) Slack channel for additional help beyond the documentation.
+
+## All databases are not the same
+
+There's a tremendous amount of work that goes into creating a database. Here is a high-level list of typical database layers (from the outermost layer moving inwards):
+- SQL API
+- Client Library / Driver
+- Server Connection Manager
+- Query parser
+- Query optimizer
+- Runtime
+- Storage Access Layer
+- Storage
+
+There's a lot more there than just SQL as a language. Databases (and data warehouses) are so popular because you can abstract away a great deal of the complexity from your brain to the database itself. This enables you to focus more on the data.
+
+dbt allows for further abstraction and standardization of the outermost layers of a database (SQL API, client library, connection manager) into a framework that both:
+ - Opens database technology to less technical users (a large swath of a DBA's role has been automated, similar to how the vast majority of folks with websites today no longer have to be "[webmasters](https://en.wikipedia.org/wiki/Webmaster)").
+ - Enables more meaningful conversations about how data warehousing should be done.
+
+This is where dbt adapters become critical.
+
+## What needs to be adapted?
+
+dbt adapters are responsible for _adapting_ dbt's standard functionality to a particular database. Our prototypical database and adapter are PostgreSQL and dbt-postgres, and most of our adapters are somewhat based on the functionality described in dbt-postgres.
+
+Connecting dbt to a new database will require a new adapter to be built or an existing adapter to be extended.
+
+The outermost layers of a database map roughly to the areas in which the dbt adapter framework encapsulates inter-database differences.
+
+### SQL API
+
+Even amongst ANSI-compliant databases, there are differences in the SQL grammar.
+Here are some categories and examples of SQL statements that can be constructed differently:
+
+| Category | Area of differences | Examples |
+|----------|---------------------|----------|
+| Statement syntax | The use of `IF EXISTS` | • `IF EXISTS, DROP TABLE` <br /> • `DROP ... IF EXISTS` |
+| Workflow definition & semantics | Incremental updates | • `MERGE` <br /> • `DELETE; INSERT` |
+| Relation and column attributes/configuration | Database-specific materialization configs | • `DIST = ROUND_ROBIN` (Synapse) <br /> • `DIST = EVEN` (Redshift) |
+| Permissioning | Grant statements that can only take one grantee at a time vs those that accept lists of grantees | • `grant SELECT on table dinner.corn to corn_kid, everyone` <br /> • `grant SELECT on table dinner.corn to corn_kid; grant SELECT on table dinner.corn to everyone` |
+
+### Python Client Library & Connection Manager
+
+The other big category of inter-database differences comes with how the client connects to the database and executes queries against the connection. To integrate with dbt, a data platform must have a pre-existing Python client library or support ODBC, using a generic Python library like pyodbc.
+
+| Category | Area of differences | Examples |
+|----------|---------------------|----------|
+| Credentials & authentication | Authentication | • Username & password <br /> • MFA with `boto3` or Okta token |
+| Connection opening/closing | Create a new connection to db | • `psycopg2.connect(connection_string)` <br /> • `google.cloud.bigquery.Client(...)` |
+| Inserting local data | Load seed `.csv` files into Python memory | • `google.cloud.bigquery.Client.load_table_from_file(...)` (BigQuery) <br /> • `INSERT ... INTO VALUES ...` prepared statement (most other databases) |
+
+## How dbt encapsulates and abstracts these differences
+
+Differences between databases are encoded into discrete areas:
+
+| Components | Code Path | Function |
+|------------|-----------|----------|
+| Python Classes | `adapters/<adapter_name>` | Configuration (see [Python classes](#python-classes) below) |
+| Macros | `include/<adapter_name>/macros/adapters/` | SQL API & statement syntax (for example, how to create schema or how to get table info) |
+| Materializations | `include/<adapter_name>/macros/materializations/` | Table/view/snapshot workflow definitions |
+
+### Python Classes
+
+These classes implement all the methods responsible for:
+- Connecting to a database and issuing queries.
+- Providing dbt with database-specific configuration information.
+
+| Class | Description |
+|-------|-------------|
+| AdapterClass | High-level configuration type conversion and any database-specific Python methods needed |
+| AdapterCredentials | Typed dictionary of possible profiles and associated methods |
+| AdapterConnectionManager | All the methods responsible for connecting to a database and issuing queries |
+| AdapterRelation | How relation names should be rendered, printed, and quoted. Do relation names use all three parts? `catalog.model_name` (two-part name) or `database.schema.model_name` (three-part name) |
+| AdapterColumn | How column names should be rendered, and database-specific properties |
+
+### Macros
+
+A set of *macros* responsible for generating SQL that is compliant with the target database.
+
+### Materializations
+
+A set of *materializations* and their corresponding helper macros defined in dbt using Jinja and SQL. They codify for dbt how model files should be persisted into the database.
+
+## Adapter Architecture
+
+
+Below is a diagram of how dbt-postgres, the adapter at the center of dbt-core, works.
+
+
diff --git a/website/docs/guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter.md b/website/docs/guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter.md
new file mode 100644
index 00000000000..271108a620c
--- /dev/null
+++ b/website/docs/guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter.md
@@ -0,0 +1,52 @@
+---
+title: "Prerequisites for a new adapter"
+id: "2-prerequisites-for-a-new-adapter"
+---
+
+To learn what an adapter is and the role adapters serve, see [What are adapters?](1-what-are-adapters).
+
+It is very important to make sure that you have the right skills, and to understand the level of difficulty required to make an adapter for your data platform.
+
+## Pre-Requisite Data Warehouse Features
+
+The more you can answer Yes to the below questions, the easier your adapter development (and user) experience will be. See the [New Adapter Information Sheet wiki](https://github.com/dbt-labs/dbt-core/wiki/New-Adapter-Information-Sheet) for even more specific questions.
+
+### Training
+- the developer (and any product managers) ideally will have substantial experience as an end-user of dbt. If not, it is highly advised that you at least take the [dbt Fundamentals](https://courses.getdbt.com/courses/fundamentals) and [Advanced Materializations](https://courses.getdbt.com/courses/advanced-materializations) courses.
+
+### Database
+- Does the database complete transactions fast enough for interactive development?
+- Can you execute SQL against the data platform?
+- Is there a concept of schemas?
+- Does the data platform support ANSI SQL, or at least a subset?
+
+### Driver / Connection Library
+- Is there a Python-based driver for interacting with the database that is db API 2.0 compliant (e.g. Psycopg2 for Postgres, pyodbc for SQL Server)?
+- Does it support: prepared statements, multiple statements, or single sign-on token authorization to the data platform?
+
+### Open source software
+- Does your organization have an established process for publishing open source software?
+
+
+It is easiest to build an adapter for dbt when the data warehouse/platform in question has:
+- a conventional ANSI-SQL interface (or as close to it as possible),
+- a mature connection library/SDK that uses ODBC or Python DB 2 API, and
+- a way to enable developers to iterate rapidly with both quick reads and writes.
+
+
+## Maintaining your new adapter
+
+When your adapter becomes more popular, and people start using it, you may quickly become the maintainer of an increasingly popular open source project. With this new role come some unexpected responsibilities that not only include code maintenance, but also working with a community of users and contributors. To help people understand what to expect of your project, you should communicate your intentions early and often in your adapter documentation or README. Answer questions like: Is this experimental work that people should use at their own risk? Or is this production-grade code that you're committed to maintaining into the future?
+
+### Keeping the code compatible with dbt Core
+
+New minor version releases of `dbt-core` may include changes to the Python interface for adapter plugins, as well as new or updated test cases. The maintainers of `dbt-core` will clearly communicate these changes in documentation and release notes, and they will aim for backwards compatibility whenever possible.
+
+Patch releases of `dbt-core` will _not_ include breaking changes to adapter-facing code. For more details, see ["About dbt Core versions"](core-versions).
+
+### Versioning and releasing your adapter
+
+We strongly encourage you to adopt the following approach when versioning and releasing your plugin:
+- The minor version of your plugin should match the minor version in `dbt-core` (e.g. 1.1.x).
+- Aim to release a new version of your plugin for each new minor version of `dbt-core` (once every three months).
+- While your plugin is new, and you're iterating on features, aim to offer backwards compatibility and deprecation notices for at least one minor version. As your plugin matures, aim to leave backwards compatibility and deprecation notices in place until the next major version (dbt Core v2).
+- Release patch versions of your plugins whenever needed. These patch releases should contain fixes _only_.
diff --git a/website/docs/docs/contributing/building-a-new-adapter.md b/website/docs/guides/advanced/adapter-development/3-building-a-new-adapter.md similarity index 53% rename from website/docs/docs/contributing/building-a-new-adapter.md rename to website/docs/guides/advanced/adapter-development/3-building-a-new-adapter.md index b7320a6a007..9b24d0baaba 100644 --- a/website/docs/docs/contributing/building-a-new-adapter.md +++ b/website/docs/guides/advanced/adapter-development/3-building-a-new-adapter.md @@ -1,73 +1,62 @@ --- title: "Building a new adapter" -id: "building-a-new-adapter" +id: "3-building-a-new-adapter" --- -## What are adapters? +:::tip +Before you build your adapter, we strongly encourage you to first learn dbt as an end user, learn [what an adapter is and the role they serve](1-what-are-adapters), as well as [data platform prerequisites](2-prerequisites-for-a-new-adapter) +::: -dbt "adapters" are responsible for _adapting_ dbt's functionality to a given database. If you want to make dbt work with a new database, you'll probably need to build a new adapter, or extend an existing one. Adapters are comprised of three layers: -1. At the lowest level: An *adapter class* implementing all the methods responsible for connecting to a database and issuing queries. -2. In the middle: A set of *macros* responsible for generating SQL that is compliant with the target database. -3. (Optional) At the highest level: A set of *materializations* that tell dbt how to turn model files into persisted objects in the database. - -This guide will walk you through the first two steps, and provide some resources to help you validate that your new adapter is working correctly. Once the adapter is passing most of the functional tests (see ["Testing a new adapter"](testing-a-new-adapter) -), please let the community know that is available to use by adding the adapter to the [Available Adapters](docs/available-adapters) page by following the steps given in [Documenting your adapter](docs/contributing/documenting-a-new-adapter). +This guide will walk you through the first creating the necessary adapter classes and macros, and provide some resources to help you validate that your new adapter is working correctly. Once the adapter is passing most of the functional tests (see ["Testing a new adapter"](4-testing-a-new-adapter) +), please let the community know that is available to use by adding the adapter to the ["Supported Data Platforms"](supported-data-platforms) page by following the steps given in [Documenting your adapter](5-documenting-a-new-adapter). For any questions you may have, don't hesitate to ask in the [#adapter-ecosystem](https://getdbt.slack.com/archives/C030A0UF5LM) Slack channel. The community is very helpful and likely has experienced a similar issue as you. -## Pre-Requisite Data Warehouse Features - -The more you can answer Yes to the below questions, the easier your adapter development (and user-) experience will be. See the [New Adapter Information Sheet wiki](https://github.com/dbt-labs/dbt-core/wiki/New-Adapter-Information-Sheet) for even more specific questions. - -### Training -- the developer (and any product managers) ideally will have substantial experience as an end-user of dbt. If not, it is highly advised that you at least take the [dbt Fundamentals](https://courses.getdbt.com/courses/fundamentals) and [Advanced Materializations](https://courses.getdbt.com/courses/advanced-materializations) course. 
-
-### Database
-- Does the database complete transactions fast enough for interactive development?
-- Can you execute SQL against the data platform?
-- Is there a concept of schemas?
-- Does the data platform support ANSI SQL, or at least a subset?
-### Driver / Connection Library
-- Is there a Python-based driver for interacting with the database that is db API 2.0 compliant (e.g. Psycopg2 for Postgres, pyodbc for SQL Server)
-- Does it support: prepared statements, multiple statements, or single sign on token authorization to the data platform?
-
-### Open source software
-- Does your organization have an established process for publishing open source software?
-
+## Scaffolding a new adapter
+ To create a new adapter plugin from scratch, you can use the [dbt-database-adapter-scaffold](https://github.com/dbt-labs/dbt-database-adapter-scaffold) to trigger an interactive session which will generate scaffolding for you to build upon.
-It is easiest to build an adapter for dbt when the following the data warehouse/platform in question has:
-- a conventional ANSI-SQL interface (or as close to it as possible),
-- a mature connection library/SDK that uses ODBC or Python DB 2 API, and
-- a way to enable developers to iterate rapidly with both quick reads and writes
+ Example usage:
-## Scaffolding a new adapter
+ ```
+ $ cookiecutter gh:dbt-labs/dbt-database-adapter-scaffold
+ ```
-dbt comes equipped with a script which will automate a lot of the legwork in building a new adapter. This script will generate a standard folder structure, set up the various import dependencies and references, and create namespace packages so the plugin can interact with dbt. You can find this script in the dbt repo in dbt's [scripts/](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/scripts/create_adapter_plugins.py) directory.
+The generated boilerplate starting project will include a basic adapter plugin file structure, examples of macros, high-level method descriptions, etc.
-Example usage:
+One of the most important choices you will make during the cookiecutter generation will revolve around the field for `is_sql_adapter`, which is a boolean used to correctly apply imports for either a `SQLAdapter` or `BaseAdapter`. Knowing which you will need requires a deeper knowledge of your selected database, but a few good guides for the choice are:
+- Does your database have a complete SQL API? Can it perform tasks using SQL such as creating schemas, dropping schemas, querying an `information_schema` for metadata calls? If so, it is more likely to be a SQLAdapter where you set `is_sql_adapter` to `True`.
+- Most adapters do fall under SQL adapters, which is why we chose it as the default `True` value.
+- It is very possible to build out a fully functional `BaseAdapter`. This will require a little more groundwork as it doesn't come with some prebuilt methods the `SQLAdapter` class provides. See `dbt-bigquery` as a good guide.
-```
-$ python create_adapter_plugins.py --sql --title-case=MyAdapter ./ myadapter
-```
-
-You will get a folder named 'myadapter' in the local directory, with some subfolders and files created. Your adapter will be named 'MyAdapter' in the generated code - without `--title-case=MyAdapter` it would be 'Myadapter'. You can set other flags to specify dependencies, author, and package information as well.
If your adapter implements SQL's `information_schema` (or something similar enough) and supports a cursor() method on its connections, you may pass the `--sql` flag to derive from the SQLAdapter, which is much easier to implement than the BaseAdapter! Compare dbt's native BigQuery adapter with its SnowflakeAdapter to get an idea of the difference between the two.
+Regardless of whether you decide to use the cookiecutter template or manually create the plugin, this section will go over each method that is required to be implemented. The table below provides a high-level overview of the classes, methods, and macros you may have to define for your data platform.

-This rest of this guide will assume that a SQLAdapter is being used.
+| file | component | purpose |
+|---------------------------------------------------|-------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `./setup.py` | `setup()` function | adapter meta-data (package name, version, author, homepage, etc) |
+| `myadapter/dbt/adapters/myadapter/__init__.py` | `AdapterPlugin` | bundle all the information below into a dbt plugin |
+| `myadapter/dbt/adapters/myadapter/connections.py` | `MyAdapterCredentials` class | parameters to connect to and configure the database, via the chosen Python driver |
+| `myadapter/dbt/adapters/myadapter/connections.py` | `MyAdapterConnectionManager` class | telling dbt how to interact with the database with respect to opening/closing connections, executing queries, and fetching data. Effectively a wrapper around the db API or driver. |
+| `myadapter/dbt/include/myadapter/` | a dbt project of macro "overrides" in the format of "myadapter__" | any differences in SQL syntax for regular db operations will be modified here from the global_project (e.g. "Create Table As Select", "Get all relations in the current schema", etc) |
+| `myadapter/dbt/adapters/myadapter/impl.py` | `MyAdapterConfig` | database- and relation-level configs |
+| `myadapter/dbt/adapters/myadapter/impl.py` | `MyAdapterAdapter` | for changing _how_ dbt performs operations, via macros and other needed Python functionality |
+| `myadapter/dbt/adapters/myadapter/column.py` | `MyAdapterColumn` | for defining database-specific column behavior, such as datatype mappings |

-### Editing setup.py
+### Editing `setup.py`

Edit the file at `myadapter/setup.py` and fill in the missing information.

-You can skip this step if you passed the arguments for `email`, `url`, `author`, and `dependencies` to the script. If you plan on having nested macro folder structures, you may need to add entries to `package_data` so your macro source files get installed.
+You can skip this step if you passed the arguments for `email`, `url`, `author`, and `dependencies` to the cookiecutter template. If you plan on having nested macro folder structures, you may need to add entries to `package_data` so your macro source files get installed.

### Editing the connection manager

Edit the connection manager at `myadapter/dbt/adapters/myadapter/connections.py`. This file is defined in the sections below.

-### The Credentials class
+#### The Credentials class

-The credentials class defines all of the database-specific credentials (e.g. `username` and `password`) that users will need in the [connection profile](configure-your-profile) for your new adapter.
Each credentials contract should subclass dbt.adapters.base.Credentials, and be implemented as a python dataclass.
+The credentials class defines all of the database-specific credentials (e.g. `username` and `password`) that users will need in the [connection profile](/docs/supported-data-platforms) for your new adapter. Each credentials contract should subclass `dbt.adapters.base.Credentials`, and be implemented as a Python dataclass.

Note that the base class includes required database and schema fields, as dbt uses those values internally.

@@ -93,7 +82,7 @@ class MyAdapterCredentials(Credentials):
    @property
    def type(self):
        return 'myadapter'
-
+
    @property
    def unique_field(self):
        """
@@ -135,17 +124,18 @@ class MyAdapterCredentials(Credentials):
Then users can use `collection` OR `database` in their `profiles.yml`, `dbt_project.yml`, or `config()` calls to set the database.

-### Connection methods
+#### `ConnectionManager` class methods

Once credentials are configured, you'll need to implement some connection-oriented methods. They are enumerated in the SQLConnectionManager docstring, but an overview will also be provided here.

**Methods to implement:**
-- open
-- get_response
-- cancel
-- exception_handler
+- `open`
+- `get_response`
+- `cancel`
+- `exception_handler`
+- `standardize_grants_dict`

-#### open(cls, connection)
+##### `open(cls, connection)`

`open()` is a classmethod that gets a connection object (which could be in any state, but will have a `Credentials` object with the attributes you defined above) and moves it to the 'open' state.

@@ -156,11 +146,11 @@ Generally this means doing the following:
- on success:
    - set connection.state to `'open'`
    - set connection.handle to the handle object
-        - this is what must have a cursor() method that returns a cursor!
+        - this is what must have a `cursor()` method that returns a cursor!
- on error:
    - set connection.state to `'fail'`
    - set connection.handle to `None`
-    - raise a dbt.exceptions.FailedToConnectException with the error and any other relevant information
+    - raise a `dbt.exceptions.FailedToConnectException` with the error and any other relevant information

For example:

@@ -190,7 +180,7 @@ For example:

-#### get_response(cls, cursor)
+##### `get_response(cls, cursor)`

`get_response` is a classmethod that gets a cursor object and returns adapter-specific information about the last executed command. The return value should be an `AdapterResponse` object that includes items such as `code`, `rows_affected`, `bytes_processed`, and a summary `_message` for logging to stdout.

For example:

@@ -206,14 +196,14 @@ For example:
            _message=status_message,
            code=code,
            rows_affected=rows
-        )
+        )
```

-#### cancel(self, connection)
+##### `cancel(self, connection)`

-cancel is an instance method that gets a connection object and attempts to cancel any ongoing queries, which is database dependent. Some databases don't support the concept of cancellation, they can simply implement it via 'pass' and their adapter classes should implement an `is_cancelable` that returns False - On ctrl+c connections may remain running. This method must be implemented carefully, as the affected connection will likely be in use in a different thread.
+`cancel` is an instance method that gets a connection object and attempts to cancel any ongoing queries, which is database-dependent.
Some databases don't support the concept of cancellation; these can simply implement it via `pass`, and their adapter classes should implement an `is_cancelable` method that returns `False` (on Ctrl+C, connections may remain running). This method must be implemented carefully, as the affected connection will likely be in use in a different thread.

@@ -229,9 +219,9 @@ cancel is an instance method that gets a connection object and attempts to cance

-#### exception_handler(self, sql, connection_name='master')
+##### `exception_handler(self, sql, connection_name='master')`

-exception_handler is an instance method that returns a context manager that will handle exceptions raised by running queries, catch them, log appropriately, and then raise exceptions dbt knows how to handle.
+`exception_handler` is an instance method that returns a context manager that will handle exceptions raised by running queries, catch them, log appropriately, and then raise exceptions dbt knows how to handle.

If you use the (highly recommended) `@contextmanager` decorator, you only have to wrap a `yield` inside a `try` block, like so:

@@ -256,16 +246,46 @@ If you use the (highly recommended) `@contextmanager` decorator, you only have t

+##### `standardize_grants_dict(self, grants_table: agate.Table) -> dict`
+
+`standardize_grants_dict` is a method that returns the dbt-standardized grants dictionary that matches how users configure grants in dbt. The input is the result of a `SHOW GRANTS ON {{model}}` call loaded into an agate table.
+
+If there's any massaging of the agate table containing the results of `SHOW GRANTS ON {{model}}` that can't easily be accomplished in SQL, it can be done here. For example, the SQL to show grants *should* filter OUT any grants TO the current user/role (e.g. OWNERSHIP). If that's not possible in SQL, it can be done in this method instead.
+
+
+
+```python
+    @available
+    def standardize_grants_dict(self, grants_table: agate.Table) -> dict:
+        """
+        :param grants_table: An agate table containing the query result of
+            the SQL returned by get_show_grant_sql
+        :return: A standardized dictionary matching the `grants` config
+        :rtype: dict
+        """
+        grants_dict: Dict[str, List[str]] = {}
+        for row in grants_table:
+            grantee = row["grantee"]
+            privilege = row["privilege_type"]
+            if privilege in grants_dict.keys():
+                grants_dict[privilege].append(grantee)
+            else:
+                grants_dict.update({privilege: [grantee]})
+        return grants_dict
+```
+
+
+
### Editing the adapter implementation

Edit the adapter implementation at `myadapter/dbt/adapters/myadapter/impl.py`.

-Very little is required to implement the adapter itself. On some adapters, you will not need to override anything. On others, you'll likely need to override some of the convert_* classmethods, or override the `is_cancelable` classmethod on others to return False.
+Very little is required to implement the adapter itself. On some adapters, you will not need to override anything. On others, you'll likely need to override some of the `convert_*` classmethods, or override the `is_cancelable` classmethod to return `False`.

-#### datenow()
+#### `datenow()`

-This classmethod provides the adapter's canonical date function. This is not used but is required anyway on all adapters.
+This classmethod provides the adapter's canonical date function. This is not used, but is required anyway on all adapters.

@@ -281,23 +301,24 @@ This classmethod provides the adapter's canonical date function. This is not use

dbt implements specific SQL operations using jinja macros.
While reasonable defaults are provided for many such operations (like `create_schema`, `drop_schema`, `create_table`, etc.), you may need to override one or more of these macros when building a new adapter.

-### Required macros
+#### Required macros

The following macros must be implemented, but you can override their behavior for your adapter using the "dispatch" pattern described below. Macros marked (required) do not have a valid default implementation, and are required for dbt to operate.

-- `alter_column_type` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L140))
-- `check_schema_exists` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L224))
-- `create_schema` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L21))
-- `drop_relation` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L164))
-- `drop_schema` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L31))
-- `get_columns_in_relation` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L95)) (required)
-- `list_relations_without_caching` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L240)) (required)
-- `list_schemas` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L210))
-- `rename_relation` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L185))
-- `truncate_relation` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L175))
-- `current_timestamp` ([source](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/adapters/common.sql#L269)) (required)
-
-### Adapter dispatch
+- `alter_column_type` ([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/columns.sql#L37-L55))
+- `check_schema_exists` ([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/metadata.sql#L43-L55))
+- `create_schema` ([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/schema.sql#L1-L9))
+- `drop_relation` ([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/relation.sql#L34-L42))
+- `drop_schema` ([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/schema.sql#L12-L20))
+- `get_columns_in_relation` ([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/columns.sql#L1-L8)) (required)
+- `list_relations_without_caching` ([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/metadata.sql#L58-L65)) (required)
+- `list_schemas`
([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/metadata.sql#L29-L40))
+- `rename_relation` ([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/relation.sql#L56-L65))
+- `truncate_relation` ([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/relation.sql#L45-L53))
+- `current_timestamp` ([source](https://github.com/dbt-labs/dbt-core/blob/f988f76fccc1878aaf8d8631c05be3e9104b3b9a/core/dbt/include/global_project/macros/adapters/freshness.sql#L1-L8)) (required)
+- `copy_grants`
+
+#### Adapter dispatch

Most modern databases support a majority of the standard SQL spec. There are some databases that _do not_ support critical aspects of the SQL spec, however, or they provide their own nonstandard mechanisms for implementing the same functionality. To account for these variations in SQL support, dbt provides a mechanism called [multiple dispatch](https://en.wikipedia.org/wiki/Multiple_dispatch) for macros. With this feature, macros can be overridden for specific adapters. This makes it possible to implement high-level methods (like "create ") in a database-specific way.

@@ -342,7 +363,7 @@ The `adapter.dispatch()` macro takes a second argument, `packages`, which repres
- "Shim" package examples: [`spark-utils`](https://github.com/dbt-labs/spark-utils), [`tsql-utils`](https://github.com/dbt-msft/tsql-utils)
- [`adapter.dispatch` docs](dispatch)

-### Overriding adapter methods
+#### Overriding adapter methods

While much of dbt's adapter-specific functionality can be modified in adapter macros, it can also make sense to override adapter methods directly. In this example, assume that a database does not support a `cascade` parameter to `drop schema`. Instead, we can implement an approximation where we drop each relation and then drop the schema.

@@ -361,6 +382,9 @@ While much of dbt's adapter-specific functionality can be modified in adapter ma

+#### Grants Macros
+
+See [this GitHub discussion](https://github.com/dbt-labs/dbt-core/discussions/5468) for information on the macros required for `GRANT` statements.

### Other files

#### `profile_template.yml`

@@ -375,17 +399,18 @@ See examples:

#### `__version__.py`

-To assure that `dbt --version` provides the latest dbt core version the adapter supports, be sure include a `__version__.py` file. The filepath will be `dbt/adapters//__version__.py`. We recommend using the latest dbt core version and as the adapter is made compatible with later versions, this file will need to be updated. For a sample file, check out this [example](https://github.com/dbt-labs/dbt-core/blob/develop/plugins/snowflake/dbt/adapters/snowflake/__version__.py).
+To ensure that `dbt --version` reports the latest dbt Core version the adapter supports, be sure to include a `__version__.py` file. The filepath will be `dbt/adapters/<adapter name>/__version__.py`. We recommend using the latest dbt Core version; as the adapter is made compatible with later versions, this file will need to be updated. For a sample file, check out this [example](https://github.com/dbt-labs/dbt-snowflake/blob/main/dbt/adapters/snowflake/__version__.py).
+
+Note that both of these files are included in the bootstrapped output of the `dbt-database-adapter-scaffold`, so they are created for you when you use the scaffold.
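
For reference, a `__version__.py` can be as small as a single assignment; the version string below is a placeholder for whichever dbt Core version your adapter currently supports:

```python
# dbt/adapters/<adapter name>/__version__.py (minimal sketch)
version = "1.2.0"  # placeholder: latest dbt Core version the adapter supports
```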
+
+## Testing your new adapter

-It should be noted that both of these files are included in the bootstrapped output of the `create_adapter_plugins.py` so when using that script, these files will be included.
+This has moved to its own page: ["Testing a new adapter"](4-testing-a-new-adapter)

-### Testing your new adapter
## Documenting your new adapter

-This has moved to its own page: ["Testing a new adapter"](testing-a-new-adapter)
+This has moved to its own page: ["Documenting a new adapter"](5-documenting-a-new-adapter)

-### Documenting your new adapter
## Maintaining your new adapter

-Many community members maintain their adapter plugins under open source licenses. If you're interested in doing this, we recommend:
-- Hosting on a public git provider (e.g. GitHub, GitLab)
-- Publishing to [PyPi](https://pypi.org/)
-- Adding to the list of ["Available Adapters"](available-adapters#community-supported)
+This has moved to a new spot: ["Maintaining your new adapter"](2-prerequisites-for-a-new-adapter#maintaining-your-new-adapter)
diff --git a/website/docs/docs/contributing/testing-a-new-adapter.md b/website/docs/guides/advanced/adapter-development/4-testing-a-new-adapter.md
similarity index 97%
rename from website/docs/docs/contributing/testing-a-new-adapter.md
rename to website/docs/guides/advanced/adapter-development/4-testing-a-new-adapter.md
index 3d539a7a667..2fa0b3aaba3 100644
--- a/website/docs/docs/contributing/testing-a-new-adapter.md
+++ b/website/docs/guides/advanced/adapter-development/4-testing-a-new-adapter.md
@@ -1,6 +1,6 @@
---
title: "Testing a new adapter"
-id: "testing-a-new-adapter"
+id: "4-testing-a-new-adapter"
---

:::info
@@ -10,10 +12,12 @@ Previously, we offered a packaged suite of tests for dbt adapter functionality:
:::

This document has two sections:
-1. ["About the testing framework"](#about-the-testing-framework) describes the standard framework that we maintain for using pytest together with dbt. It includes an example that shows the anatomy of a simple test case.
-2. ["Testing your adapter"](#testing-your-adapter) offers a step-by-step guide for using our out-of-the-box suite of "basic" tests, which will validate that your adapter meets a baseline of dbt functionality.
+
+1. "[About the testing framework](#about-the-testing-framework)" describes the standard framework that we maintain for using pytest together with dbt. It includes an example that shows the anatomy of a simple test case.
+2. "[Testing your adapter](#testing-your-adapter)" offers a step-by-step guide for using our out-of-the-box suite of "basic" tests, which will validate that your adapter meets a baseline of dbt functionality.

## Prerequisites
+
- Your adapter must be compatible with dbt-core **v1.1** or newer
- You should be familiar with **pytest**: https://docs.pytest.org/

@@ -201,7 +203,7 @@ In the course of creating and maintaining your adapter, it's likely that you wil
2. **Optional tests**, for second-order functionality that is common across plugins, but not required for basic use. Your plugin can opt into these test cases by inheriting existing ones, or reimplementing them with adjustments. For now, this category includes all tests located outside the `basic` subdirectory. More tests will be added as we convert older tests defined on dbt-core and mature plugins to use the standard framework.

-3. **Custom tests**, for behavior that is specific to your adapter / data platform. Each data warehouse has its own specialties and idiosyncracies.
We encourage you to use the same `pytest`-based framework, utilities, and fixtures to write your own custom tests for functionality that is unique to your adapter.
+3. **Custom tests**, for behavior that is specific to your adapter / data platform. Each has its own specialties and idiosyncrasies. We encourage you to use the same `pytest`-based framework, utilities, and fixtures to write your own custom tests for functionality that is unique to your adapter.

If you run into an issue with the core framework, or the basic/optional test cases—or if you've written a custom test that you believe would be relevant and useful for other adapter plugin developers—please open an issue or PR in the `dbt-core` repository on GitHub.
diff --git a/website/docs/docs/contributing/documenting-a-new-adapter.md b/website/docs/guides/advanced/adapter-development/5-documenting-a-new-adapter.md
similarity index 79%
rename from website/docs/docs/contributing/documenting-a-new-adapter.md
rename to website/docs/guides/advanced/adapter-development/5-documenting-a-new-adapter.md
index 5afc7c3cb19..9565ada14c7 100644
--- a/website/docs/docs/contributing/documenting-a-new-adapter.md
+++ b/website/docs/guides/advanced/adapter-development/5-documenting-a-new-adapter.md
@@ -1,18 +1,26 @@
---
title: "Documenting a new adapter"
-id: "documenting-a-new-adapter"
+id: "5-documenting-a-new-adapter"
---

-If you've already [built](/website/docs/docs/contributing/building-a-new-adapter.md), and [tested]((/website/docs/docs/contributing/testing-a-new-adapter.md)) your adapter, it's time to document it so the dbt community will know that it exists and how to use it!
+If you've already [built](3-building-a-new-adapter) and [tested](4-testing-a-new-adapter) your adapter, it's time to document it so the dbt community will know that it exists and how to use it.
+
+## Making your adapter available
+
+Many community members maintain their adapter plugins under open source licenses. If you're interested in doing this, we recommend:
+- Hosting on a public git provider (for example, GitHub or GitLab)
+- Publishing to [PyPI](https://pypi.org/)
+- Adding to the list of ["Supported Data Platforms"](supported-data-platforms#community-supported) (more info below)

## General Guidelines
-To best inform the dbt community of the new adapter, you should contribute to the dbt's open-source documentation site, which uses the [Docusauraus project](https://docusaurus.io/). This is the site you're currently on!
+
+To best inform the dbt community of the new adapter, you should contribute to dbt's open-source documentation site, which uses the [Docusaurus project](https://docusaurus.io/). This is the site you're currently on!

### Conventions

Each `.md` file you create needs a header as shown below. The document id will also need to be added to the config file: `website/sidebars.js`.
-```
+```md
---
title: "Documenting a new adapter"
id: "documenting-a-new-adapter"
---
@@ -42,7 +50,7 @@ The following subjects need to be addressed across three pages of this docs site
|----------------------|--------------------------------------------------------------|--------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Connect | `reference/warehouse-profiles/{MY-DATA-PLATFORM}-profile.md` | Create | Give all information needed to define a target in `~/.dbt/profiles.yml` and get `dbt debug` to connect to the database successfully. All possible configurations should be mentioned. |
| Configure | `reference/resource-configs/{MY-DATA-PLATFORM}-configs.md` | Create | What options and configuration specific to your data platform do users need to know? e.g. table distribution and indexing options, column_quoting policy, which incremental strategies are supported |
-| Discover and Install | `docs/available-adapters.md` | Modify | Is it a vendor- or community- supported adapter? How to install Python adapter package? Ideally with pip and PyPI hosted package, but can also use `git+` link to GitHub Repo |
+| Discover and Install | `docs/supported-data-platforms.md` | Modify | Is it a vendor- or community- supported adapter? How to install Python adapter package? Ideally with pip and PyPI hosted package, but can also use `git+` link to GitHub Repo |
| Add link to sidebar | `website/sidebars.js` | Modify | Add the document id to the correct location in the sidebar menu |

For example, say I want to document my new adapter: `dbt-ders`. For the "Connect" page, I will make a new Markdown file, `ders-profile.md`, and add it to the `website/docs/reference/warehouse-profiles/` directory.
diff --git a/website/docs/guides/advanced/adapter-development/6-promoting-a-new-adapter.md b/website/docs/guides/advanced/adapter-development/6-promoting-a-new-adapter.md
new file mode 100644
index 00000000000..206179203fd
--- /dev/null
+++ b/website/docs/guides/advanced/adapter-development/6-promoting-a-new-adapter.md
@@ -0,0 +1,119 @@
+---
+title: "Promoting a new adapter"
+id: "6-promoting-a-new-adapter"
+---
+
+## Model for engagement in the dbt community
+
+The most important thing here is recognizing that people are successful in the community when they join, first and foremost, to engage authentically.
+
+What does authentic engagement look like? It’s challenging to define explicit rules. One good rule of thumb is to treat people with dignity and respect.
+
+Contributors to the community should think of contribution *as the end itself,* not a means toward other business KPIs (leads, community members, etc.). [We are a mission-driven company.](https://www.getdbt.com/dbt-labs/values/) Some ways to know if you’re authentically engaging:
+
+- Is the engagement’s *primary* purpose sharing knowledge and resources, or building brand engagement?
+- Imagine you didn’t work at the org you do — can you imagine yourself still writing this?
+- Is it written in formal / marketing language, or does it sound like you, the human?
+
+## Who should join the dbt Community Slack

### People who have insight into what it means to do hands-on [analytics engineering](https://www.getdbt.com/analytics-engineering/) work

The dbt Community Slack workspace is fundamentally a place for analytics practitioners to interact with each other — the closer the users are in the community to actual data/analytics engineering work, the more natural their engagement will be (leading to better outcomes for partners and the community).

### DevRel practitioners with a strong focus

DevRel practitioners often have a strong analytics background and a good understanding of the community. It’s essential to be sure they are focused on *contributing,* not on driving community metrics for their partner org (such as signing people up for their Slack or events). The metrics will rise naturally through authentic engagement.

### Founders and executives who are interested in directly engaging with the community

This is either incredibly successful or not at all, depending on the profile of the founder. Typically, this works best when the founder has a practitioner-level technical understanding and is interested in joining not to promote, but to learn and hear from users.

### Software Engineers at partner products that are building and supporting integrations with either dbt Core or dbt Cloud

This is successful when the engineers are familiar with dbt as a product or at least have taken our training course. The Slack is often a place where end-user questions and feedback are initially shared, so it is recommended that someone technical from the team be present. There are also a handful of channels aimed at those building integrations, which tend to be a font of knowledge.

### Who might struggle in the dbt community
#### People in marketing roles
dbt Slack is not a marketing channel. Attempts to use it as such invariably fall flat and can even lead to people having a negative view of a product. This doesn’t mean that dbt can’t serve marketing objectives, but a long-term commitment to engagement is the only proven method to do this sustainably.

#### People in product roles
The dbt Community can be an invaluable source of feedback on a product. There are two primary ways this can happen — organically (community members proactively suggesting a new feature) and via direct calls for feedback and user research. Direct calls for engagement must be made in your dedicated #tools channel, and they should be used sparingly, as they can overwhelm more organic discussions and feedback.

## Who is the audience for an adapter release?

A new adapter is likely to drive huge community interest from several groups of people:
- People who are currently using the database that the adapter is supporting.
- People who may be adopting the database in the near future.
- People who are interested in dbt development in general.

The database users will be your primary audience and the most helpful in achieving success. Engage them directly in the adapter’s dedicated Slack channel. If one does not exist already, reach out in #channel-requests, and we will get one made for you and include it in an announcement about new channels.

The final group is where non-Slack community engagement becomes important. Twitter and LinkedIn are both great places to interact with a broad audience. A well-orchestrated adapter release can generate impactful and authentic engagement.
+
+## How to message the initial rollout and follow-up content

Tell a story that engages dbt users and the community. Highlight new use cases and functionality unlocked by the adapter in a way that will resonate with each segment.

### Existing users of your technology who are new to dbt
 - Provide a general overview of the value dbt will deliver to your users. This can lean on dbt's messaging and talking points, which are laid out in the [dbt viewpoint](https://docs.getdbt.com/docs/about/viewpoint).
 - Give examples of a rollout that speaks to the overall value of dbt and your product.

### Users who are already familiar with dbt and the community
- Consider unique use cases or advantages your adapter provides over existing adapters. Who will be excited for this?
- Contribute to the dbt Community and ensure that dbt users on your adapter are well supported (tutorial content, packages, documentation, etc.).
- Example of a rollout that is compelling for those familiar with dbt: [Firebolt](https://www.linkedin.com/feed/update/urn:li:activity:6879090752459182080/)

## Tactically manage distribution of content about new or existing adapters

There are tactical pieces on how and where to share that help ensure success.

### On Slack:
- #i-made-this channel — this channel has a policy against “marketing” and “content marketing” posts, but it should be successful if you write your content with the above guidelines in mind. Even with that, it’s important to post here sparingly.
- Your own database / tool channel — this is the audience that has opted in to receive communications from you, and it’s always a great place to share things that are relevant to them.

### On social media:
- Twitter
- LinkedIn
- Social media posts *from the author* or an individual connected to the project tend to have better engagement than posts from a company or organization account.
- Ask your partner representative about:
  - Retweets and shares from the official dbt Labs accounts.
  - Flagging posts internally at dbt Labs to get individual employees to share.

## Measuring engagement

You don’t need 1000 people in a channel to succeed, but you need at least a few active participants who can make it feel lived in. If you’re comfortable working in public, this could be members of your team, or it can be a few people who you know that are highly engaged and would be interested in participating. Having even 2 or 3 regulars hanging out in a channel is all that’s needed for a successful start and is, in fact, much more impactful than 250 people who never post.

## How to announce a new adapter

We’d recommend *against* boilerplate announcements and encourage finding a unique voice. That being said, there are a couple of things that we’d want to include:

- A summary of the value prop of your database / technology for users who aren’t familiar.
- The personas that might be interested in this news.
- A description of what the adapter *is*. For example:
  > With the release of our new dbt adapter, you’ll be able to use dbt to model and transform your data in [name-of-your-org]
- Particular or unique use cases or functionality unlocked by the adapter.
- Plans for future / ongoing support / development.
- The link to the documentation for using the adapter on the dbt Labs docs site.
- An announcement blog.
+
+## Announcing new release versions of existing adapters

This can vary substantially depending on the nature of the release, but a good baseline is the types of release messages that [we put out in the #dbt-releases](https://getdbt.slack.com/archives/C37J8BQEL/p1651242161526509) channel.

![Full Release Post](/img/adapter-guide/0-full-release-notes.png)

Breaking this down:

- Visually distinctive announcement - make it clear this is a release

- Short written description of what is in the release

- Links to additional resources

- Implementation instructions

- Future plans

- Contributor recognition (if applicable)

diff --git a/website/docs/guides/advanced/adapter-development/7-verifying-a-new-adapter.md b/website/docs/guides/advanced/adapter-development/7-verifying-a-new-adapter.md
new file mode 100644
index 00000000000..7fa36585877
--- /dev/null
+++ b/website/docs/guides/advanced/adapter-development/7-verifying-a-new-adapter.md
@@ -0,0 +1,41 @@
+---
+title: "Verifying a new adapter"
+id: "7-verifying-a-new-adapter"
+---
+
+## Why verify an adapter?
+
+The very first data platform dbt supported was Redshift, followed quickly by Postgres ([dbt-core#174](https://github.com/dbt-labs/dbt-core/pull/174)). In 2017, back when dbt Labs (née Fishtown Analytics) was still a data consultancy, we added support for Snowflake and BigQuery. We also turned dbt's database support into an adapter framework ([dbt-core#259](https://github.com/dbt-labs/dbt-core/pull/259/)), and a plugin system a few years later. For years, dbt Labs specialized in those four data platforms and became experts in them. However, the surface area of all possible databases, their respective nuances, and keeping them up-to-date and bug-free is a Herculean and/or Sisyphean task that couldn't be done by a single person or even a single team! Enter the dbt community, which enables dbt Core to work on more than 30 different databases (32 as of Sep '22)!
+
+Free and open-source tools for the data professional are increasingly abundant. This is by and large a *good thing*; however, it requires due diligence that wasn't required in a paid-license, closed-source software world. Before taking a dependency on an open-source project, it is important to determine the answers to the following questions:
+
+1. Does it work?
+2. Does it meet my team's specific use case?
+3. Does anyone "own" the code, or is anyone liable for ensuring it works?
+4. Do bugs get fixed quickly?
+5. Does it stay up-to-date with new Core features?
+6. Is the usage substantial enough to self-sustain?
+7. What risks do I take on by taking a dependency on this library?
+
+These are valid, important questions to answer—especially given that `dbt-core` itself only put out its first stable release (major version v1.0) in December 2021! Indeed, up until now, the majority of new user questions in database-specific channels have been some form of:
+- "How mature is `dbt-`? Any gotchas I should be aware of before I start exploring?"
+- "has anyone here used `dbt-` for production models?"
+- "I've been playing with `dbt-` -- I was able to install and run my initial experiments. I noticed that there are certain features mentioned on the documentation that are marked as 'not ok' or 'not tested'. What are the risks?
I'd love to make a statement on my team to adopt DBT [sic], but I'm pretty sure questions will be asked around the possible limitations of the adapter or if there are other companies out there using dbt [sic] with Oracle DB in production, etc."
+
+There has been a tendency to trust the dbt Labs-maintained adapters over community- and vendor-supported adapters, but repo ownership is only one among many indicators of software quality. With a new program, we aim to help our users feel well-informed about the caliber of an adapter.

## Verified by dbt Labs

The adapter verification program aims to quickly indicate to users which adapters can be trusted to use in production. Previously, doing so was uncharted territory for new users and complicated making the business case to their leadership team. We plan to give quality assurances by:
1. appointing a key stakeholder for the adapter repository,
2. ensuring that the chosen stakeholder fixes bugs and cuts new releases in a timely manner (see ["Maintaining your new adapter"](2-prerequisites-for-a-new-adapter#maintaining-your-new-adapter)),
3. demonstrating that it passes our adapter pytest suite tests,
4. assuring that it works for us internally and, ideally, for an existing team using the adapter in production.


Every major and minor version of an adapter will be verified internally and given an official :white_check_mark: (custom emoji coming soon) on the ["Supported Data Platforms"](supported-data-platforms) page.

## How to get an adapter verified?

We envision that data platform vendors will be most interested in having their adapter versions verified; however, we are open to community adapter verification. If interested, please either reach out to `partnerships` at `dbtlabs.com` or post in the [#adapter-ecosystem Slack channel](https://getdbt.slack.com/archives/C030A0UF5LM).
\ No newline at end of file
diff --git a/website/docs/guides/best-practices/how-we-structure/1-guide-overview.md b/website/docs/guides/best-practices/how-we-structure/1-guide-overview.md
new file mode 100644
index 00000000000..8a5aaa998be
--- /dev/null
+++ b/website/docs/guides/best-practices/how-we-structure/1-guide-overview.md
@@ -0,0 +1,98 @@
+---
+title: How we structure our dbt projects
+id: 1-guide-overview
+---
+
+## Why does structure matter?
+
+Analytics engineering, at its core, is about helping groups of human beings collaborate on better decisions at scale. We have [limited bandwidth for making decisions](https://en.wikipedia.org/wiki/Decision_fatigue). We also, as a cooperative social species, rely on [systems and patterns to optimize collaboration](https://en.wikipedia.org/wiki/Pattern_language) with others. This combination of traits means that for collaborative projects it's crucial to establish consistent and comprehensible norms, such that your team’s limited bandwidth for decision making can be spent on unique and difficult problems, not deciding where folders should go or how to name files.
+
+Building a great dbt project is an inherently collaborative endeavor, bringing together domain knowledge from every department to map the goals and narratives of the entire company. As such, it's especially important to establish a deep and broad set of patterns to ensure as many people as possible are empowered to leverage their particular expertise in a positive way, and to ensure that the project remains approachable and maintainable as your organization scales.
+
+Famously, Steve Jobs [wore the same outfit every day](https://images.squarespace-cdn.com/content/v1/5453c539e4b02ab5398ffc8f/1580381503218-E56FQDNFL1P4OBLQWHWW/ke17ZwdGBToddI8pDm48kJKedFpub2aPqa33K4gNUDwUqsxRUqqbr1mOJYKfIPR7LoDQ9mXPOjoJoqy81S2I8N_N4V1vUb5AoIIIbLZhVYxCRW4BPu10St3TBAUQYVKcxb5ZTIyC_D49_DDQq2Sj8YVGtM7O1i4h5tvKa2lazN4nGUQWMS_WcPM-ztWbVr-c/steve_jobs_outfit.jpg) to reduce decision fatigue. You can think of this guide similarly, as a black turtleneck and New Balance sneakers for your company’s dbt project. A dbt project’s power outfit, or more accurately its structure, is composed not of fabric but of files, folders, naming conventions, and programming patterns. How you label things, group them, split them up, or bring them together — the system you use to organize the data transformations encoded in your dbt project — this is your project’s structure.

This guide is just a starting point. You may decide that you prefer Birkenstocks or a purple hoodie for your project over Jobs-ian minimalism. That's fine. What's important is that you think through the reasoning for those changes in your organization, explicitly declare them in a thorough, accessible way for all contributors, and above all *stay consistent*.

One foundational principle that applies to all dbt projects, though, is the need to establish a cohesive arc moving data from *source-conformed* to *business-conformed*. Source-conformed data is shaped by external systems out of our control, while business-conformed data is shaped by the needs, concepts, and definitions we create. No matter what patterns or conventions you define within your project, this process remains the essential purpose of the transformation layer, and dbt as your tool within it. This guide is an update to a seminal analytics engineering [post of the same name](https://discourse.getdbt.com/t/how-we-structure-our-dbt-projects/355) by the great Claire Carroll, and while some of the details have changed over time (as anticipated in that post), this fundamental trajectory holds true. Moving forward, this guide will be iteratively updated as new tools expand our viewpoints, new experiences sharpen our vision, and new voices strengthen our perspectives, but always in service of that aim.

### Learning goals

This guide has three main goals:

- Thoroughly cover our most up-to-date recommendations on how to structure typical dbt projects
- Illustrate these recommendations with comprehensive examples
- At each stage, explain *why* we recommend the approach that we do, so that you're equipped to decide when and where to deviate from these recommendations to better fit your organization’s unique needs

You should walk away from this guide with a deeper mental model of how the components of a dbt project fit together, such that the purpose and principles of analytics engineering feel clearer and more intuitive.

By approaching our structure intentionally, we’ll gain a better understanding of foundational ideals, like moving our data from the wide array of narrower source-conformed models that our systems give us to a smaller set of wider, richer business-conformed designs we create. As we move along that arc, we’ll understand how stacking our transformations in optimized, modular layers means we can apply each transformation in only one place.
With a disciplined approach to the files, folders, and materializations that comprise our structure, we’ll find that we can create clear stories not only through our data, but also through our codebase and the artifacts it generates in our warehouse.
+
+Our hope is that by deepening your sense of the connections between these patterns and the principles they flow from, you'll be able to translate them to fit your specific needs and craft customized documentation for your team to act on.
+
+:::info Example project.
+This guide walks through our recommendations using a very simple dbt project — similar to the one used for the Getting Started guide and many other demos — from a fictional company called the Jaffle Shop. You can read more about [jaffles](https://en.wiktionary.org/wiki/jaffle) if you want (they *are* a real thing), but that context isn’t important to understand the structure. We encourage you to follow along, try things out, make changes, and take notes on what works or doesn't work for you along the way.
+:::
+
+We'll get a deeper sense of our project as we move through the guide, but for now we just need to know that the Jaffle Shop is a restaurant selling jaffles that has two main data sources:
+
+- A replica of our transactional database, called `jaffle_shop`, with core entities like orders and customers.
+- Synced data from [Stripe](https://stripe.com/), which we use for processing payments.
+
+### Guide structure overview
+
+We'll walk through our topics in the same order that our data would move through transformation:
+
+1. Dig into how we structure the files, folders, and models for our three primary layers in the `models` directory, which build on each other:
+   1. **Staging** — creating our atoms, our initial modular building blocks, from source data
+   2. **Intermediate** — stacking layers of logic with clear and specific purposes to prepare our staging models to join into the entities we want
+   3. **Marts** — bringing together our modular pieces into a wide, rich vision of the entities our organization cares about
+2. Explore how these layers fit into the rest of the project:
+   1. Review the overall structure comprehensively
+   2. Expand on YAML configuration in-depth
+   3. Discuss how to use the other folders in a dbt project: `tests`, `seeds`, and `analysis`
+
+Below is the complete file tree of the project we’ll be working through. Don’t worry if this looks like a lot of information to take in at once - this is just to give you the full vision of what we’re building towards. We’ll focus on each of the sections one by one as we break down the project’s structure.
+ +```markdown +jaffle_shop +├── README.md +├── analysis +├── data +│ └── employees.csv +├── dbt_project.yml +├── macros +│ └── cents_to_dollars.sql +├── models +│ ├── intermediate +│ │ └── finance +│ │ ├── _int_finance__models.yml +│ │ └── int_payments_pivoted_to_orders.sql +│ ├── marts +│ │ ├── finance +│ │ │ ├── _finance__models.yml +│ │ │ ├── orders.sql +│ │ │ └── payments.sql +│ │ └── marketing +│ │ ├── _marketing__models.yml +│ │ └── customers.sql +│ ├── staging +│ │ ├── jaffle_shop +│ │ │ ├── _jaffle_shop__docs.md +│ │ │ ├── _jaffle_shop__models.yml +│ │ │ ├── _jaffle_shop__sources.yml +│ │ │ ├── base +│ │ │ │ ├── base_jaffle_shop__customers.sql +│ │ │ │ └── base_jaffle_shop__deleted_customers.sql +│ │ │ ├── stg_jaffle_shop__customers.sql +│ │ │ └── stg_jaffle_shop__orders.sql +│ │ └── stripe +│ │ ├── _stripe__models.yml +│ │ ├── _stripe__sources.yml +│ │ └── stg_stripe__payments.sql +│ └── utilities +│ └── all_dates.sql +├── packages.yml +├── snapshots +└── tests + └── assert_positive_value_for_total_amount.sql +``` diff --git a/website/docs/guides/best-practices/how-we-structure/2-staging.md b/website/docs/guides/best-practices/how-we-structure/2-staging.md new file mode 100644 index 00000000000..225d1a3caf6 --- /dev/null +++ b/website/docs/guides/best-practices/how-we-structure/2-staging.md @@ -0,0 +1,220 @@ +--- +title: "Staging: Preparing our atomic building blocks" +id: 2-staging +--- + +The staging layer is where our journey begins. This is the foundation of our project, where we bring all the individual components we're going to use to build our more complex and useful models into the project. + +We'll use an analogy for working with dbt throughout this guide: thinking modularly in terms of atoms, molecules, and more complex outputs like proteins or cells (we apologize in advance to any chemists or biologists for our inevitable overstretching of this metaphor). Within that framework, if our source system data is a soup of raw energy and quarks, then you can think of the staging layer as condensing and refining this material into the individual atoms we’ll later build more intricate and useful structures with. + +### Staging: Files and folders + +Let's zoom into the staging directory from our `models` file tree [in the overview](/guides/best-practices/how-we-structure/1-guide-overview) and walk through what's going on here. + +```markdown +models/staging +├── jaffle_shop +│ ├── _jaffle_shop__docs.md +│ ├── _jaffle_shop__models.yml +│ ├── _jaffle_shop__sources.yml +│ ├── base +│ │ ├── base_jaffle_shop__customers.sql +│ │ └── base_jaffle_shop__deleted_customers.sql +│ ├── stg_jaffle_shop__customers.sql +│ └── stg_jaffle_shop__orders.sql +└── stripe + ├── _stripe__models.yml + ├── _stripe__sources.yml + └── stg_stripe__payments.sql +``` + +- **Folders.** Folder structure is extremely important in dbt. Not only do we need a consistent structure to find our way around the codebase, as with any software project, but our folder structure is also one of the key interfaces into understanding the knowledge graph encoded in our project (alongside the DAG and the data output into our warehouse). It should reflect how the data flows, step-by-step, from a wide variety of source-conformed models into fewer, richer business-conformed models. Moreover, we can use our folder structure as a means of selection in dbt [selector syntax](https://docs.getdbt.com/reference/node-selection/syntax). 
For example, with the above structure, if we got fresh Stripe data loaded and wanted to run all the models that build on our Stripe data, we can easily run `dbt build --select staging.stripe+` and we’re all set building more up-to-date reports on payments.
  - ✅ **Subdirectories based on the source system**. Our internal transactional database is one system, the data we get from Stripe's API is another, and the events from our Snowplow instrumentation are a third. We've found this to be the best grouping for most companies, as source systems tend to share similar loading methods and properties between tables, and this allows us to operate on those similar sets easily.
  - ❌ **Subdirectories based on loader.** Some people attempt to group by how the data is loaded (Fivetran, Stitch, custom syncs), but this is too broad to be useful on a project of any real size.
  - ❌ **Subdirectories based on business grouping.** Another approach we recommend against is splitting up by business groupings in the staging layer, and creating subdirectories like 'marketing', 'finance', etc. A key goal of any great dbt project should be establishing a single source of truth. By breaking things up too early, we open ourselves up to creating overlapping and conflicting definitions (think marketing and finance having different fundamental tables for orders). We want everybody to be building with the same set of atoms, so in our experience, starting our transformations with our staging structure reflecting the source system structures is the best level of grouping for this step.
- **File names.** Creating a consistent pattern of file naming is [crucial in dbt](https://docs.getdbt.com/blog/on-the-importance-of-naming). File names must be unique and correspond to the name of the model when selected and created in the warehouse. We recommend putting as much clear information into the file name as possible, including a prefix for the layer the model exists in, important grouping information, and specific information about the entity or transformation in the model.
  - ✅ `stg_[source]__[entity]s.sql` - the double underscore between source system and entity helps visually distinguish the separate parts in the case of a source name having multiple words. For instance, `google_analytics__campaigns` is always understandable, whereas to somebody unfamiliar `google_analytics_campaigns` could be `analytics_campaigns` from the `google` source system as easily as `campaigns` from the `google_analytics` source system. Think of it like an [oxford comma](https://www.youtube.com/watch?v=P_i1xk07o4g): the extra clarity is very much worth the extra punctuation.
  - ❌ `stg_[entity].sql` - might be specific enough at first, but will break down in time. Adding the source system into the file name aids in discoverability, and allows understanding where a component model came from even if you aren't looking at the file tree.
  - ✅ **Plural.** SQL, and particularly SQL in dbt, should read as much like prose as we can achieve. We want to lean into the broad clarity and declarative nature of SQL when possible. As such, unless there’s a single order in your `orders` table, plural is the correct way to describe what is in a table with multiple rows.

### Staging: Models

Now that we’ve got a feel for how the files and folders fit together, let’s look inside one of these files and dig into what makes for a well-structured staging model.
+
+Below is an example of a standard staging model (from our `stg_stripe__payments` model) that illustrates the common patterns within the staging layer. We’ve organized our model into two CTEs: one pulling in a source table via the [source macro](https://docs.getdbt.com/docs/building-a-dbt-project/using-sources#selecting-from-a-source) and the other applying our transformations.
+
+While our later layers of transformation will vary greatly from model to model, every one of our staging models will follow this exact same pattern. As such, we need to make sure the pattern we’ve established is rock solid and consistent.
+
+```sql
+-- stg_stripe__payments.sql
+
+with
+
+source as (
+
+    select * from {{ source('stripe','payment') }}
+
+),
+
+renamed as (
+
+    select
+        -- ids
+        id as payment_id,
+        orderid as order_id,
+
+        -- strings
+        paymentmethod as payment_method,
+        case
+            when payment_method in ('stripe', 'paypal', 'credit_card', 'gift_card') then 'credit'
+            else 'cash'
+        end as payment_type,
+        status,
+
+        -- numerics
+        amount as amount_cents,
+        amount / 100.0 as amount,
+
+        -- booleans
+        case
+            when status = 'successful' then true
+            else false
+        end as is_completed_payment,
+
+        -- dates
+        date_trunc('day', created) as created_date,
+
+        -- timestamps
+        created::timestamp_ltz as created_at
+
+    from source
+
+)
+
+select * from renamed
+```
+
+- Based on the above, the most standard types of staging model transformations are:
+  - ✅ **Renaming**
+  - ✅ **Type casting**
+  - ✅ **Basic computations** (e.g. cents to dollars)
+  - ✅ **Categorizing** (using conditional logic to group values into buckets or booleans, such as in the `case when` statements above)
+  - ❌ **Joins** — the goal of staging models is to clean and prepare individual source-conformed concepts for downstream usage. We're creating the most useful version of a source system table, which we can use as a new modular component for our project. In our experience, joins are almost always a bad idea here — they create immediate duplicated computation and confusing relationships that ripple downstream — but there are occasional exceptions (see [base models](/guides/best-practices/how-we-structure/2-staging#staging-other-considerations) below).
+  - ❌ **Aggregations** — aggregations entail grouping, and we're not doing that at this stage. Remember: staging models are your place to create the building blocks you’ll use all throughout the rest of your project — if we start changing the grain of our tables by grouping in this layer, we’ll lose access to source data that we’ll likely need at some point. We just want to get our individual concepts cleaned and ready for use, and will handle aggregating values downstream.
+- ✅ **Materialized as views.** Looking at a partial view of our `dbt_project.yml` below, we can see that we’ve configured the entire staging directory to be materialized as views.
As they’re not intended to be final artifacts themselves, but rather building blocks for later models, staging models should typically be materialized as views for two key reasons:
+  - Any downstream model (discussed more in [marts](/guides/best-practices/how-we-structure/4-marts)) referencing our staging models will always get the freshest data possible from all of the component views it’s pulling together and materializing
+  - It avoids wasting space in the warehouse on models that are not intended to be queried by data consumers, and thus do not need to perform as quickly or efficiently
+
+  ```yaml
+  # dbt_project.yml
+
+  models:
+    jaffle_shop:
+      staging:
+        +materialized: view
+  ```
+
+- Staging models are the only place we'll use the [`source` macro](/docs/build/sources), and our staging models should have a 1-to-1 relationship to our source tables. That means for each source system table we’ll have a single staging model referencing it, acting as its entry point — *staging* it — for use downstream.
+
+:::tip Don’t Repeat Yourself.
+Staging models help us keep our code DRY. dbt's modular, reusable structure means we can, and should, push any transformations that we’ll always want to use for a given component model as far upstream as possible. This saves us from potentially wasting code, complexity, and compute doing the same transformation more than once. For instance, if we know we always want our monetary values as floats in dollars, but the source system stores integers in cents, we want to do the division and type casting as early as possible so that we can reference the result rather than redo the work repeatedly downstream.
+:::
+
+This is a welcome change for many of us who have become used to applying the same sets of SQL transformations in many places out of necessity! For us, the earliest point for these 'always-want' transformations is the staging layer, the initial entry point in our transformation process. The DRY principle is ultimately the litmus test for whether a transformation belongs in the staging layer: if we'll want it in every downstream model and it helps us eliminate repeated code, it's probably a good fit.
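+
+To make this concrete, here’s a minimal sketch of what the `cents_to_dollars` macro from our project tree might look like (the signature and numeric precision here are illustrative assumptions, not the canonical implementation):
+
+```sql
+-- macros/cents_to_dollars.sql (a sketch)
+
+{% macro cents_to_dollars(column_name, scale=2) %}
+    ({{ column_name }} / 100)::numeric(16, {{ scale }})
+{% endmacro %}
+```
+
+With something like this in place, a staging model can simply select `{{ cents_to_dollars('amount') }} as amount`, and every model downstream inherits the exact same conversion.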
+
+### Staging: Other considerations
+
+- **Base models when joins are necessary to stage concepts.** Sometimes, in order to maintain a clean and DRY staging layer, we do need to implement some joins to create a solid concept for our building blocks. In these cases, we recommend creating a sub-directory in the staging directory for the source system in question and building `base` models. These have all the same properties that would normally be in the staging layer: they source the raw data directly and apply the non-joining transformations; then, in the staging models, we join the requisite base models. The most common use cases for building a base layer under a staging folder are:
+  - ✅ **Joining in separate delete tables.** Sometimes a source system stores deletes in a separate table. Typically we’ll want to make sure we can mark or filter out deleted records for all our component models, so we’ll need to join these delete records up to any of our entities that follow this pattern. This is the pattern the example below illustrates.
+
+    ```sql
+    -- base_jaffle_shop__customers.sql
+
+    with
+
+    source as (
+
+        select * from {{ source('jaffle_shop','customers') }}
+
+    ),
+
+    customers as (
+
+        select
+            id as customer_id,
+            first_name,
+            last_name
+
+        from source
+
+    )
+
+    select * from customers
+    ```
+
+    ```sql
+    -- base_jaffle_shop__deleted_customers.sql
+
+    with
+
+    source as (
+
+        select * from {{ source('jaffle_shop','customer_deletes') }}
+
+    ),
+
+    deleted_customers as (
+
+        select
+            id as customer_id,
+            deleted as deleted_at
+
+        from source
+
+    )
+
+    select * from deleted_customers
+    ```
+
+    ```sql
+    -- stg_jaffle_shop__customers.sql
+
+    with
+
+    customers as (
+
+        select * from {{ ref('base_jaffle_shop__customers') }}
+
+    ),
+
+    deleted_customers as (
+
+        select * from {{ ref('base_jaffle_shop__deleted_customers') }}
+
+    ),
+
+    join_and_mark_deleted_customers as (
+
+        select
+            customers.*,
+            case
+                when deleted_customers.deleted_at is not null then true
+                else false
+            end as is_deleted
+
+        from customers
+
+        left join deleted_customers on customers.customer_id = deleted_customers.customer_id
+
+    )
+
+    select * from join_and_mark_deleted_customers
+    ```
+
+  - ✅ **Unioning disparate but symmetrical sources.** A typical example here would be if you operate multiple ecommerce platforms in various territories via a SaaS platform like Shopify. You would have perfectly identical schemas, but all loaded separately into your warehouse. In this case, it’s easier to reason about our orders if *all* of our shops are unioned together, so we’d want to handle the unioning in a base model before we carry on with our usual staging model transformations on the (now complete) set — you can dig into [more detail on this use case here](https://discourse.getdbt.com/t/unioning-identically-structured-data-sources/921), and there’s a brief sketch after this list.
+- **[Codegen](https://github.com/dbt-labs/dbt-codegen) to automate staging table generation.** It’s very good practice to learn to write staging models by hand: they’re straightforward and numerous, so writing them is an excellent way to absorb the dbt style of writing SQL. Also, we’ll invariably find ourselves needing to add special elements to specific models at times — for instance, in one of the situations above that require base models — so it’s helpful to deeply understand how they work. Once that understanding is established though, because staging models are built largely following the same rote patterns and need to be built 1-to-1 for each source table in a source system, it’s preferable to start automating their creation. For this, we have the [codegen](https://github.com/dbt-labs/dbt-codegen) package. This will let you automatically generate all the source YAML and staging model boilerplate to speed up this step, and we recommend using it in every project.
+- **Utilities folder.** While this is not in the `staging` folder, it’s useful to consider as part of our fundamental building blocks. The `models/utilities` directory is where we can keep any general purpose models that we generate from macros or based on seeds that provide tools to help us do our modeling, rather than data to model itself. The most common use case is a [date spine](https://github.com/dbt-labs/dbt-utils#date_spine-source) generated with [the dbt utils package](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/).
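+
+As a quick sketch of the unioning pattern above (the source names are hypothetical, and this assumes two identically structured Shopify schemas plus the [dbt_utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/) package):
+
+```sql
+-- base_shopify__orders.sql (a sketch)
+
+{{ dbt_utils.union_relations(
+    relations=[
+        source('shopify_us', 'orders'),
+        source('shopify_eu', 'orders')
+    ]
+) }}
+```
+
+The usual `stg_shopify__orders` model would then select from this base model and carry on with renaming and type casting as normal.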
+
+:::info Development flow versus DAG order.
+This guide follows the order of the DAG, so we can get a holistic picture of how these three primary layers build on each other towards fueling impactful data products. It’s important to note though that developing models does not typically move linearly through the DAG. Most commonly, we should start by mocking out a design in a spreadsheet so we know we’re aligned with our stakeholders on output goals. Then, we’ll want to write the SQL to generate that output, and identify what tables are involved. Once we have our logic and dependencies, we’ll make sure we’ve staged all the necessary atomic pieces into the project, then bring them together based on the logic we wrote to generate our mart. Finally, with a functioning model flowing in dbt, we can start refactoring and optimizing that mart. By splitting the logic up and moving parts back upstream into intermediate models, we ensure all of our models are clean and readable, the story of our DAG is clear, and we have more surface area to apply thorough testing.
+:::
diff --git a/website/docs/guides/best-practices/how-we-structure/3-intermediate.md b/website/docs/guides/best-practices/how-we-structure/3-intermediate.md
new file mode 100644
index 00000000000..a98881fa969
--- /dev/null
+++ b/website/docs/guides/best-practices/how-we-structure/3-intermediate.md
@@ -0,0 +1,99 @@
+---
+title: "Intermediate: Purpose-built transformation steps"
+id: 3-intermediate
+---
+
+Once we’ve got our atoms ready to work with, we’ll set about bringing them together into more intricate, connected molecular shapes. The intermediate layer is where these molecules live, creating varied forms with specific purposes on the way towards the more complex proteins and cells we’ll use to breathe life into our data products.
+
+### Intermediate: Files and folders
+
+Let’s take a look at the intermediate layer of our project to understand the purpose of this stage more concretely.
+
+```markdown
+models/intermediate
+└── finance
+    ├── _int_finance__models.yml
+    └── int_payments_pivoted_to_orders.sql
+```
+
+- **Folders**
+  - ✅ **Subdirectories based on business groupings.** Much like the staging layer, we’ll house this layer of models inside its own `intermediate` subfolder. Unlike the staging layer, here we shift towards being business-conformed, splitting our models up into subdirectories not by their source system, but by their area of business concern.
+- **File names**
+  - ✅ `int_[entity]s_[verb]s.sql` - the variety of transformations that can happen inside of the intermediate layer makes it harder to dictate strictly how to name them. The best guiding principle is to think about *verbs* (e.g. `pivoted`, `aggregated_to_user`, `joined`, `fanned_out_by_quantity`, `funnel_created`, etc.) in the intermediate layer. In our example project, we use an intermediate model to pivot payments out to the order grain, so we name our model `int_payments_pivoted_to_orders`. It’s easy for anybody to quickly understand what’s happening in that model, even if they don’t know SQL. That clarity is worth the long file name. It’s important to note that we’ve dropped the double underscores at this layer. In moving towards business-conformed concepts, we no longer need to separate a system and an entity, and can simply reference the unified entity where possible. In cases where you need intermediate models to operate at the source system level (e.g. `int_shopify__orders_summed`, `int_core__orders_summed`, which you would later union), you’d preserve the double underscores. Some people like to separate the entity and verbs with double underscores as well.
That’s a matter of preference, but in our experience there are often intrinsic connections between entities and verbs in this layer that make that pattern difficult to maintain.
+
+:::tip Don’t over-optimize too early!
+The example project is very simple for illustrative purposes. This level of division in our post-staging layers is probably unnecessary when dealing with this few models. Remember, our goal is a *single source of truth*: we don’t want finance and marketing operating on separate `orders` models, we want to use our dbt project as a means to bring those definitions together! As such, don’t split and optimize too early. If you have fewer than 10 marts models and aren’t having problems developing and using them, feel free to forego subdirectories completely (except in the staging layer, where you should always implement them as you add new source systems to your project) until the project has grown to really need them. Using dbt is always about bringing simplicity to complexity.
+:::
+
+### Intermediate: Models
+
+Below is the lone intermediate model from our small example project. This represents an excellent use case per our principles above, serving a clear single purpose: grouping and pivoting a staging model to a different grain. It utilizes a bit of Jinja to make the model DRY-er (striving to be DRY applies to the code we write inside a single model in addition to transformations across the codebase), but don’t be intimidated if you’re not quite comfortable with [Jinja](https://docs.getdbt.com/docs/building-a-dbt-project/jinja-macros) yet. Looking at the name of the CTE, `pivot_and_aggregate_payments_to_order_grain`, we get a very clear idea of what’s happening inside this block. By descriptively labeling the transformations happening inside our CTEs within a model, just as we do with our files and folders, even a stakeholder who doesn’t know SQL would be able to grasp the purpose of this section, if not the code. As you begin to write more complex transformations moving out of the staging layer, keep this idea in mind. In the same way our models connect into a DAG and tell the story of our transformations on a macro scale, CTEs can do this on a smaller scale inside our model files.
+
+```sql
+-- int_payments_pivoted_to_orders.sql
+
+{%- set payment_methods = ['bank_transfer','credit_card','coupon','gift_card'] -%}
+
+with
+
+payments as (
+
+    select * from {{ ref('stg_stripe__payments') }}
+
+),
+
+pivot_and_aggregate_payments_to_order_grain as (
+
+    select
+        order_id,
+        {% for payment_method in payment_methods -%}
+
+        sum(
+            case
+                when payment_method = '{{ payment_method }}' and
+                     status = 'success'
+                then amount
+                else 0
+            end
+        ) as {{ payment_method }}_amount,
+
+        {%- endfor %}
+        sum(case when status = 'success' then amount end) as total_amount
+
+    from payments
+
+    group by 1
+
+)
+
+select * from pivot_and_aggregate_payments_to_order_grain
+```
+
+- ❌ **Exposed to end users.** Intermediate models should generally not be exposed in the main production schema. They are not intended for output to final targets like dashboards or applications, so it’s best to keep them separate from the models that are, letting you more easily control data governance and discoverability.
+- ✅ **Materialized ephemerally.** Considering the above, one popular option is to default to intermediate models being materialized [ephemerally](/docs/build/materializations#ephemeral). This is generally the best place to start for simplicity.
It will keep unnecessary models out of your warehouse with minimal configuration. Keep in mind though that the simplicity of ephemeral models translates to a bit more difficulty in troubleshooting, as they’re interpolated into the models that `ref` them, rather than existing on their own in a way that you can view the output of.
+- ✅ **Materialized as views in a custom schema with special permissions.** A more robust option is to materialize your intermediate models as views in a specific [custom schema](/docs/build/custom-schemas), outside of your main production schema. This gives you added insight into development and easier troubleshooting as the number and complexity of your models grows, while remaining easy to implement and taking up negligible space.
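+
+A minimal sketch of that second option follows. The names here are assumptions to adapt to your own project; with dbt’s default schema naming behavior, the `+schema` key below builds these views into a separate `<target_schema>_intermediate` schema:
+
+```yaml
+# dbt_project.yml
+
+models:
+  jaffle_shop:
+    intermediate:
+      +materialized: view
+      +schema: intermediate
+```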
+
+:::tip Keep your warehouse tidy!
+There are three interfaces to the organizational knowledge graph we’re encoding into dbt: the DAG, the files and folder structure of our codebase, and the output into the warehouse. As such, it’s really important that we consider that output intentionally! Think of the schemas, tables, and views we’re creating in the warehouse as *part of the UX*, in addition to the dashboards, ML, apps, and other use cases you may be targeting for the data. Ensuring that our output is named and grouped well, and that models not intended for broad use are either not materialized or built into special areas with specific permissions, is crucial to achieving this.
+:::
+
+- Intermediate models’ purposes, as these serve to break up complexity from our marts models, can take as many forms as data transformation might require. Some of the most common use cases of intermediate models include:
+
+  - ✅ **Structural simplification.** Bringing together a reasonable number (typically 4 to 6) of entities or concepts (staging models, or perhaps other intermediate models) that will be joined with another similarly purposed intermediate model to generate a mart — rather than have 10 joins in our mart, we can join two intermediate models that each house a piece of the complexity, giving us increased readability, flexibility, testing surface area, and insight into our components.
+  - ✅ **Re-graining.** Intermediate models are often used to fan out or collapse models to the right composite grain — if we’re building a mart for `order_items` that requires us to fan out our `orders` based on the `quantity` column, creating a new single row for each item, this would be ideal to do in a specific intermediate model to maintain clarity in our mart and more easily verify that our grain is correct before we mix it with other components.
+  - ✅ **Isolating complex operations.** It’s helpful to move any particularly complex or difficult-to-understand pieces of logic into their own intermediate models. This not only makes them easier to refine and troubleshoot, but simplifies later models that can reference this concept in a more clearly readable way. For example, in the `quantity` fan-out example above, we benefit by isolating this complex piece of logic so we can quickly debug and thoroughly test that transformation, and downstream models can reference `order_items` in a way that’s intuitively easy to grasp.
+  - ❌ **Used repeatedly in multiple models.** If we’re referencing the same intermediate model in several places, it probably should be a macro, or we should reconsider how and where we’re bringing our models together. Remember our DRY principle: while it’s good that we’ve modularized our logic, bringing the same intermediate model into several downstream models usually indicates that we’re creating duplication in our output too early and recomputing the same transformations. Ideally, it’s in the *marts* layer that we’ll start bringing concepts together repeatedly to form a variety of outputs that often have similar data. Be wary of doing this in the intermediate layer, as it can lead to your DAG becoming confusing — generally an indicator that the mental models of our project flow are getting muddied. Additionally, it’s usually more efficient to join marts once they’ve been materialized into the warehouse, meaning we don’t have to recompute the entire chain of transformations in addition to computing the joins.
+
+    ![Multiple *outputs* from an intermediate model, bringing the same model into several different marts, is typically a red flag in our DAG that we’re computing the same transformations multiple times.](/img/guides/best-practices/how-we-structure/widening-dag.png)
+
+    Multiple *outputs* from an intermediate model, bringing the same model into several different marts, is typically a red flag in our DAG that we’re computing the same transformations multiple times.
+
+  - ✅ **Used in one model.** Bringing our intermediate models, staging models, and other marts together thoughtfully, so our models can build on each other efficiently.
+
+    ![Multiple *inputs* to a model is generally preferable. Several inputs and one output at each node indicates we’re bringing our concepts together into richer, wider entities; forming a healthy DAG shaped like an arrowhead pointing to the right.](/img/guides/best-practices/how-we-structure/narrowing-dag.png)
+
+    Multiple *inputs* to a model is generally preferable. Several inputs and one output at each node indicates we’re bringing our concepts together into richer, wider entities, forming a healthy DAG shaped like an arrowhead pointing to the right.
+
+:::tip Narrow the DAG, widen the tables.
+Until we get to the marts layer and start building our various outputs, we ideally want our DAG to look like an arrowhead pointed right. As we move from source-conformed to business-conformed, we’re also moving from numerous, narrow, isolated concepts to fewer, wider, joined concepts. We’re bringing our components together into wider, richer concepts, and that creates this shape in our DAG. This way, when we get to the marts layer, we have a robust set of components that can quickly and easily be put into any configuration to answer a variety of questions and serve specific needs. One rule of thumb to ensure you’re following this pattern on an individual model level is allowing multiple *inputs* to a model, but **not** multiple *outputs*. Several arrows going *into* our post-staging models is great and expected; several arrows coming *out* is a red flag. There are absolutely situations where you need to break this rule, but it’s something to be aware of, careful about, and avoid when possible.
+
+:::
diff --git a/website/docs/guides/best-practices/how-we-structure/4-marts.md b/website/docs/guides/best-practices/how-we-structure/4-marts.md
new file mode 100644
index 00000000000..3cf4bc78209
--- /dev/null
+++ b/website/docs/guides/best-practices/how-we-structure/4-marts.md
@@ -0,0 +1,137 @@
+---
+title: "Marts: Business-defined entities"
+id: 4-marts
+---
+
+This is the layer where everything comes together and we start to arrange all of our atoms (staging models) and molecules (intermediate models) into full-fledged cells that have identity and purpose. We sometimes like to call this the *entity layer* or *concept layer*, to emphasize that all our marts are meant to represent a specific entity or concept at its unique grain. For instance, an order, a customer, a territory, a click event, a payment — each of these would be represented with a distinct mart, and each row would represent a discrete instance of these concepts. Unlike in a traditional Kimball star schema though, in modern data warehousing — where storage is cheap and compute is expensive — we’ll happily borrow and add any and all data from other concepts that are relevant to answering questions about the mart’s core entity. Building the same data in multiple places, as we do with `orders` in our `customers` mart example below, is more efficient in this paradigm than repeatedly re-joining these concepts (this, in essence, is what denormalization means in this context). Let’s take a look at how we approach this first layer intended expressly for exposure to end users.
+
+### Marts: Files and folders
+
+The last layer of our core transformations is below, providing models for both `finance` and `marketing` departments.
+
+```markdown
+models/marts
+├── finance
+│   ├── _finance__models.yml
+│   ├── orders.sql
+│   └── payments.sql
+└── marketing
+    ├── _marketing__models.yml
+    └── customers.sql
+```
+
+✅ **Group by department or area of concern.** If you have fewer than 10 or so marts, you may not have much need for subfolders, so as with the intermediate layer, don’t over-optimize too early. If you do find yourself needing to insert more structure and grouping though, use useful business concepts here. In our marts layer, we’re no longer worried about source-conformed data, so grouping by departments (marketing, finance, etc.) is the most common structure at this stage.
+
+✅ **Name by entity.** Use plain English to name the file based on the concept that forms the grain of the mart: `customers`, `orders`. Note that for pure marts, there should not be a time dimension (`orders_per_day`) here; time-based rollups like that are typically best captured via metrics.
+
+❌ **Build the same concept differently for different teams.** `finance_orders` and `marketing_orders` is typically considered an anti-pattern. There are, as always, exceptions — a common pattern we see is that finance may have specific needs, for example reporting revenue to the government in a way that diverges from how the company as a whole measures revenue day-to-day. Just make sure that these are clearly designed and understandable as *separate* concepts, not departmental views on the same concept: `tax_revenue` and `revenue`, not `finance_revenue` and `marketing_revenue`.
+
+### Marts: Models
+
+Finally, we’ll take a look at the best practices for models within the marts directory by examining two of our marts models. These are the business-conformed entities — that is, entities crafted to our vision and needs — that we’ve been bringing these transformed components together to create.
+
+```sql
+-- orders.sql
+
+with
+
+orders as (
+
+    select * from {{ ref('stg_jaffle_shop__orders') }}
+
+),
+
+order_payments as (
+
+    select * from {{ ref('int_payments_pivoted_to_orders') }}
+
+),
+
+orders_and_order_payments_joined as (
+
+    select
+        orders.order_id,
+        orders.customer_id,
+        orders.order_date,
+        coalesce(order_payments.total_amount, 0) as amount,
+        coalesce(order_payments.gift_card_amount, 0) as gift_card_amount
+
+    from orders
+
+    left join order_payments on orders.order_id = order_payments.order_id
+
+)
+
+select * from orders_and_order_payments_joined
+```
+
+```sql
+-- customers.sql
+
+with
+
+customers as (
+
+    select * from {{ ref('stg_jaffle_shop__customers') }}
+
+),
+
+orders as (
+
+    select * from {{ ref('orders') }}
+
+),
+
+customer_orders as (
+
+    select
+        customer_id,
+        min(order_date) as first_order_date,
+        max(order_date) as most_recent_order_date,
+        count(order_id) as number_of_orders,
+        sum(amount) as lifetime_value
+
+    from orders
+
+    group by 1
+
+),
+
+customers_and_customer_orders_joined as (
+
+    select
+        customers.customer_id,
+        customers.first_name,
+        customers.last_name,
+        customer_orders.first_order_date,
+        customer_orders.most_recent_order_date,
+        coalesce(customer_orders.number_of_orders, 0) as number_of_orders,
+        customer_orders.lifetime_value
+
+    from customers
+
+    left join customer_orders on customers.customer_id = customer_orders.customer_id
+
+)
+
+select * from customers_and_customer_orders_joined
+```
+
+- ✅ **Materialized as tables or incremental models.** Once we reach the marts layer, it’s time to start building not just our logic into the warehouse, but the data itself. This gives end users much faster performance for these later models that are actually designed for their use, and saves us costs recomputing these entire chains of models every time somebody refreshes a dashboard or runs a regression in Python. A good general rule of thumb regarding materialization is to always start with a view (as it takes up essentially no storage and always gives you up-to-date results), once that view takes too long to practically *query*, build it into a table, and finally once that table takes too long to *build* and is slowing down your runs, [configure it as an incremental model](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/configuring-incremental-models/) (there’s a brief sketch of this after this list). As always, start simple and only add complexity as necessary. The models with the most data and compute-intensive transformations should absolutely take advantage of dbt’s excellent incremental materialization options, but rushing to make all your marts models incremental by default will introduce superfluous difficulty. We recommend reading this [classic post from Tristan on the limits of incremental modeling](https://discourse.getdbt.com/t/on-the-limits-of-incrementality/303).
+- ✅ **Wide and denormalized.** Unlike old-school warehousing, in the modern data stack storage is cheap and it’s compute that is expensive, so we prioritize accordingly: the goal is to pack marts into very wide, denormalized concepts that can provide everything somebody needs to know about a given entity.
+- ❌ **Too many joins in one mart.** One good rule of thumb when building dbt transformations is to avoid bringing together too many concepts in a single mart. What constitutes ‘too many’ can vary. If you need to bring 8 staging models together with nothing but simple joins, that might be fine.
Conversely, if you have 4 concepts you’re weaving together with some complex and computationally heavy window functions, that could be too much. You need to weigh the number of models you’re joining against the complexity of the logic within the mart, and if it’s too much to read through and build a clear mental model of, then look to modularize. While this isn’t a hard rule, if you’re bringing together more than 4 or 5 concepts to create your mart, you may benefit from adding some intermediate models for added clarity. Two intermediate models that bring together three concepts each, and a mart that brings together those two intermediate models, will typically result in a much more readable chain of logic than a single mart with six joins.
+- ✅ **Build on separate marts thoughtfully.** While we strive to preserve a narrowing DAG up to the marts layer, once here things may start to get a little less strict. A common example is passing information between marts at different grains, as we saw above, where we bring our `orders` mart into our `customers` mart to aggregate critical order data into a `customer` grain. Now that we’re really ‘spending’ compute and storage by actually building the data in our outputs, it’s sensible to leverage previously built resources to speed up and save costs on outputs that require similar data, versus recomputing the same views and CTEs from scratch. The right approach here is heavily dependent on your unique DAG, models, and goals — it’s just important to note that using a mart in building another, later mart is okay, but requires careful consideration to avoid wasted resources or circular dependencies.
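+
+To illustrate the incremental option mentioned in the first item above, here’s a minimal sketch. The cutoff column and filter logic are assumptions for illustration, not a drop-in implementation:
+
+```sql
+-- orders.sql, as a simple incremental model (a sketch)
+
+{{
+    config(
+        materialized='incremental',
+        unique_key='order_id'
+    )
+}}
+
+select * from {{ ref('stg_jaffle_shop__orders') }}
+
+{% if is_incremental() %}
+-- on incremental runs, only process rows newer than what's already been built
+where order_date > (select max(order_date) from {{ this }})
+{% endif %}
+```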
+
+:::tip Marts are entity-grained.
+The most important aspect of marts is that they contain all of the useful data about a *particular entity* at a granular level. That doesn’t mean we don’t bring in lots of other entities and concepts, like tons of `user` data into our `orders` mart — we do! It just means that individual `orders` remain the core grain of our table. If we start grouping `users` and `orders` along a [date spine](https://github.com/dbt-labs/dbt-utils#date_spine-source), into something like `user_orders_per_day`, we’re moving past marts into *metrics*.
+:::
+
+### Marts: Other considerations
+
+- **Troubleshoot via tables.** While stacking views and ephemeral models up until our marts — only building data into the warehouse at the end of a chain when we have the models we really want end users to work with — is ideal in production, it can present some difficulties in development. Particularly, certain errors may seem to be surfacing in our later models that actually stem from much earlier dependencies in our model chain (ancestor models in our DAG that are built before the model throwing the errors). If you’re having trouble pinning down where or what a database error is telling you, it can be helpful to temporarily build a specific chain of models as tables so that the warehouse will throw the error where it’s actually occurring.
+- **After marts: the activation layer.** In the same way that our staging models are building blocks for our marts that also offer direct views into specific source data, our marts are building blocks for our final outputs that also offer direct views into specific concepts. You can use marts directly, but they are equally important as components for building models in the *activation layer* after marts. This is a deep and fast-evolving topic, so we’ll cover it in a separate forthcoming guide that dives into:
+  - Metrics
+  - Reverse ETL
+  - Reporting and dashboards
+  - Data science and ML
+  - [Exposures](https://docs.getdbt.com/docs/build/exposures) (how we tie our dbt DAG into all of the above)
diff --git a/website/docs/guides/best-practices/how-we-structure/5-the-rest-of-the-project.md b/website/docs/guides/best-practices/how-we-structure/5-the-rest-of-the-project.md
new file mode 100644
index 00000000000..ea54462dabb
--- /dev/null
+++ b/website/docs/guides/best-practices/how-we-structure/5-the-rest-of-the-project.md
@@ -0,0 +1,113 @@
+---
+title: "The rest of the project"
+id: 5-the-rest-of-the-project
+---
+
+### Project structure review
+
+So far we’ve focused on the `models` folder, the primary directory of our dbt project. Next, we’ll zoom out and look at how the rest of our project files and folders fit in with this structure, starting with how we approach YAML configuration files.
+
+```markdown
+models
+├── intermediate
+│   └── finance
+│       ├── _int_finance__models.yml
+│       └── int_payments_pivoted_to_orders.sql
+├── marts
+│   ├── finance
+│   │   ├── _finance__models.yml
+│   │   ├── orders.sql
+│   │   └── payments.sql
+│   └── marketing
+│       ├── _marketing__models.yml
+│       └── customers.sql
+├── staging
+│   ├── jaffle_shop
+│   │   ├── _jaffle_shop__docs.md
+│   │   ├── _jaffle_shop__models.yml
+│   │   ├── _jaffle_shop__sources.yml
+│   │   ├── base
+│   │   │   ├── base_jaffle_shop__customers.sql
+│   │   │   └── base_jaffle_shop__deleted_customers.sql
+│   │   ├── stg_jaffle_shop__customers.sql
+│   │   └── stg_jaffle_shop__orders.sql
+│   └── stripe
+│       ├── _stripe__models.yml
+│       ├── _stripe__sources.yml
+│       └── stg_stripe__payments.sql
+└── utilities
+    └── all_dates.sql
+```
+
+### YAML in-depth
+
+When structuring your YAML configuration files in a dbt project, you want to balance centralization and file size to make specific configs as easy to find as possible. It’s important to note that while the top-level YAML files (`dbt_project.yml`, `packages.yml`) need to be specifically named and in specific locations, the files containing your `sources` and `models` dictionaries can be named, located, and organized however you want. It’s the internal contents that matter here. As such, we’ll lay out our primary recommendation, as well as the pros and cons of some popular alternatives. Like many other aspects of structuring your dbt project, what’s most important here is consistency, clear intention, and thorough documentation of how and why you do what you do.
+
+- ✅ **Config per folder.** As in the example above, create a `_[directory]__models.yml` per directory in your models folder that configures all the models in that directory. For staging folders, also include a `_[directory]__sources.yml` per directory.
+  - The leading underscore ensures your YAML files will be sorted to the top of every folder, making them easy to separate from your models.
+  - YAML files don’t need unique names in the way that SQL model files do, but including the directory (instead of simply `_sources.yml` in each folder) means you can fuzzy find the right file more quickly.
+  - We’ve recommended several different naming conventions over the years, most recently calling these `schema.yml` files. We’ve simplified our recommendation: label these files based on the YAML dictionary that they contain.
+  - If you utilize [doc blocks](https://docs.getdbt.com/docs/building-a-dbt-project/documentation#using-docs-blocks) in your project, we recommend following the same pattern, and creating a `_[directory]__docs.md` markdown file per directory containing all your doc blocks for that folder of models.
+- ❌ **Config per project.** Some people put *all* of their source and model YAML into one file. While you can technically do this, and while it certainly simplifies knowing which file a given config will be in (as there is only one file), it makes it much harder to find specific configurations within that file. We recommend balancing those two concerns.
+- ⚠️ **Config per model.** On the other end of the spectrum, some people prefer to create one YAML file per model. This presents less of an issue than a single monolithic file, as you can quickly search for files, know exactly where specific configurations exist, and spot models without configs (and thus without tests) by looking at the file tree, among other advantages. In our opinion, though, the extra files, tabs, and windows this requires you to create, open, copy between, and manage make for a somewhat slower development experience that outweighs those benefits. Config per directory is the most balanced approach for most projects, but if you have compelling reasons to use config per model, there are definitely some great projects that follow this paradigm.
+- ✅ **Cascade configs.** Leverage your `dbt_project.yml` to set default configurations at the directory level. Use the well-organized folder structure we’ve created thus far to define the baseline schemas and materializations, and use dbt’s cascading scope priority to define variations to this. For example, as below, define your marts to be materialized as tables by default and define separate schemas for your separate subfolders; any models that need incremental materialization can then be configured at the model level.
+
+```yaml
+# dbt_project.yml
+
+models:
+  jaffle_shop:
+    staging:
+      +materialized: view
+    intermediate:
+      +materialized: ephemeral
+    marts:
+      +materialized: table
+      finance:
+        +schema: finance
+      marketing:
+        +schema: marketing
+```
+
+:::tip Define your defaults.
+One of the many benefits this consistent approach to project structure confers to us is this ability to cascade default behavior. Carefully organizing our folders and defining configuration at that level whenever possible frees us from configuring things like schema and materialization in every single model (not very DRY!) — we only need to configure exceptions to our general rules. Tagging is another area this principle comes into play. Many people new to dbt will rely on tags rather than a rigorous folder structure, and quickly find themselves in a place where every model *requires* a tag. This creates unnecessary complexity. We want to lean on our folders as our primary selectors and grouping mechanism, and use tags to define groups that are *exceptions*. A folder-based selection like `dbt build --select marts.marketing` is much simpler than trying to tag every marketing-related model, hoping all developers remember to add that tag for new models, and using `dbt build --select tag:marketing`.
+
+:::
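+
+To make the config-per-folder recommendation above concrete, here’s a minimal sketch of one of those per-directory YAML files. The description and tests shown are illustrative, not exhaustive:
+
+```yaml
+# models/marts/marketing/_marketing__models.yml
+
+version: 2
+
+models:
+  - name: customers
+    description: One record per customer, enriched with order history.
+    columns:
+      - name: customer_id
+        description: Primary key for the customers mart.
+        tests:
+          - unique
+          - not_null
+```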
+
+### How we use the other folders
+
+```markdown
+jaffle_shop
+├── analysis
+├── data
+│   └── employees.csv
+├── macros
+│   ├── _macros.yml
+│   └── cents_to_dollars.sql
+├── snapshots
+└── tests
+    └── assert_positive_value_for_total_amount.sql
+```
+
+We’ve focused heavily thus far on the primary area of action in our dbt project, the `models` folder. As you’ve probably observed though, there are several other folders in our project. While these are, by design, very flexible to your needs, we’ll discuss the most common use cases for these other folders to help get you started.
+
+- ✅ `seeds` for lookup tables. The most common use case for seeds is loading lookup tables that are helpful for modeling but don’t exist in any source systems — think mapping zip codes to states, or UTM parameters to marketing campaigns. In this example project we have a small seed that maps our employees to their `customer_id`s, so that we can handle their purchases with special logic.
+- ❌ `seeds` for loading source data. Do not use seeds to load data from a source system into your warehouse. If it exists in a system you have access to, you should be loading it with a proper EL tool into the raw data area of your warehouse. dbt is designed to operate on data in the warehouse, not as a data-loading tool.
+- ✅ `analysis` for storing auditing queries. The `analysis` folder lets you store any queries you want to use Jinja with and version control, but not build into models in your warehouse. There are limitless possibilities here, but the most common use case when we set up projects at dbt Labs is to keep queries that leverage the [audit helper](https://github.com/dbt-labs/dbt-audit-helper) package. This package is incredibly useful for finding discrepancies in output when migrating logic from another system into dbt.
+- ✅ `tests` for testing multiple specific tables simultaneously. As dbt tests have evolved, writing singular tests has become less and less necessary. They're extremely useful for workshopping test logic, but more often than not you'll find yourself either migrating that logic into your own custom generic tests or discovering a pre-built test that meets your needs from the ever-expanding universe of dbt packages (between the extra tests in [`dbt-utils`](https://github.com/dbt-labs/dbt-utils) and [`dbt-expectations`](https://github.com/calogica/dbt-expectations), almost any situation is covered). One area where singular tests still shine though is flexibly testing things that require a variety of specific models. If you're familiar with the difference between [unit tests](https://en.wikipedia.org/wiki/Unit_testing) [and](https://www.testim.io/blog/unit-test-vs-integration-test/) [integration](https://www.codecademy.com/resources/blog/what-is-integration-testing/) [tests](https://en.wikipedia.org/wiki/Integration_testing) in software engineering, you can think of generic and singular tests in a similar way. If you need to test the results of how several specific models interact or relate to each other, a singular test will likely be the quickest way to nail down your logic.
+- ✅ `snapshots` for creating [Type 2 slowly changing dimension](https://en.wikipedia.org/wiki/Slowly_changing_dimension#Type_2:_add_new_row) records from [Type 1](https://en.wikipedia.org/wiki/Slowly_changing_dimension#Type_1:_overwrite) (destructively updated) source data. Snapshots are [covered thoroughly in the dbt Docs](https://docs.getdbt.com/docs/building-a-dbt-project/snapshots) and, unlike these other folders, have a more defined purpose, so they're out of scope for this guide — but we mention them for completeness.
+- ✅ `macros` for DRY-ing up transformations you find yourself doing repeatedly. Like snapshots, a full dive into macros is out of scope for this guide and well [covered elsewhere](https://docs.getdbt.com/docs/building-a-dbt-project/jinja-macros/), but one important structure-related recommendation is to [write documentation for your macros](https://docs.getdbt.com/faqs/docs/documenting-macros). We recommend creating a `_macros.yml` and documenting the purpose and arguments for your macros once they’re ready for use.
+
+### Project splitting
+
+One important, growing consideration in the analytics engineering ecosystem is how and when to split a codebase into multiple dbt projects. Our present stance on this for most projects, particularly for teams starting out, is straightforward: you should avoid it unless you have no other option or it saves you from an even more complex workaround. If you do have the need to split up your project, it’s completely possible through the use of private packages, but the added complexity and separation is, for most organizations, a hindrance, not a help, at present. That said, this is very likely subject to change! [We want to create a world where it’s easy to bring lots of dbt projects together into a cohesive lineage](https://github.com/dbt-labs/dbt-core/discussions/5244). In a world where it’s simple to break up monolithic dbt projects into multiple connected projects, perhaps inside of a modern monorepo, the calculus will be different, and the situations we recommend against below may become totally viable. So watch this space!
+
+- ❌ **Business groups or departments.** Conceptual separations within the project are not a good reason to split up your project. Splitting up, for instance, marketing and finance modeling into separate projects will not only add unnecessary complexity, but destroy the unifying effect of collaborating across your organization on cohesive definitions and business logic.
+- ❌ **ML vs Reporting use cases.** Similarly to the point above, splitting a project up based on different use cases, particularly more standard BI versus ML features, is a common idea. We tend to discourage it for the time being. As with the previous point, a foundational goal of implementing dbt is to create a single source of truth in your organization. The features you’re providing to your data science teams should be coming from the same marts and metrics that serve reports on executive dashboards. There are a growing number of tools like [fal](https://blog.fal.ai/introducing-fal-dbt/) and [Continual.ai](http://Continual.ai) that make excellent use of this unified viewpoint.
+- ✅ **Data governance.** Structural, organizational needs — such as data governance and security — are one of the few worthwhile reasons to split up a project. If, for instance, you work at a healthcare company with only a small team cleared to access raw data with PII in it, you may need to split out your staging models into their own project to preserve those policies. In that case, you would import your staging project into the project that builds on those staging models as a [private package](https://docs.getdbt.com/docs/building-a-dbt-project/package-management/#private-packages); there's a brief sketch of this import after this list.
+
+- ✅ **Project size.** At a certain point, your project may grow to have simply too many models to present a viable development experience. If you have thousands of models, it absolutely makes sense to find a way to split up your project.
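+
+As a sketch of the data governance case above, the importing project's `packages.yml` might look something like this (the repository URL and version are hypothetical):
+
+```yaml
+# packages.yml in the downstream project
+
+packages:
+  - git: "git@github.com:our-org/staging-project.git"  # hypothetical private repo
+    revision: 1.0.0
+```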
+
+## Final considerations
+
+Overall, consistency is more important than any of these specific conventions. As your project grows and your experience with dbt deepens, you will undoubtedly find aspects of the above structure you want to change. While we recommend this approach for the majority of projects, every organization is unique! The only dogmatic advice we’ll put forward here is that when you find aspects of the above structure you wish to change, think intently about your reasoning and document for your team *how* and *why* you are deviating from these conventions. To that end, we highly encourage you to fork this guide and add it to your project’s README, wiki, or docs so you can quickly create and customize those artifacts.
+
+Finally, we emphasize that this guide is a living document! It will certainly change and grow as dbt and dbt Labs evolve. We invite you to join in — discuss, comment, and contribute regarding suggested changes or new elements to cover.
diff --git a/website/docs/docs/guides/best-practices.md b/website/docs/guides/legacy/best-practices.md
similarity index 91%
rename from website/docs/docs/guides/best-practices.md
rename to website/docs/guides/legacy/best-practices.md
index ed529710170..5a6983bb3fe 100644
--- a/website/docs/docs/guides/best-practices.md
+++ b/website/docs/guides/legacy/best-practices.md
@@ -11,19 +11,19 @@ All dbt projects should be managed in version control. Git branches should be cr

:::info Git guide

-We've codified our best practices in Git, in our [Git guide](https://github.com/dbt-labs/corp/blob/master/git-guide.md).
+We've codified our best practices in Git, in our [Git guide](https://github.com/dbt-labs/corp/blob/main/git-guide.md).

:::

### Use separate development and production environments

-dbt makes it easy to maintain separate production and development environments through the use of target within a profile. We recommend using a `dev` target when running dbt from your command line, and only running against a `prod` target when running from a production deployment. You can read more [about managing environments](managing-environments).
+dbt makes it easy to maintain separate production and development environments through the use of targets within a profile. We recommend using a `dev` target when running dbt from your command line, and only running against a `prod` target when running from a production deployment. You can read more [about managing environments](/docs/collaborate/environments).

### Use a style guide for your project

SQL styles, field naming conventions, and other rules for your dbt project should be codified, especially on projects where multiple dbt users are writing code.

:::info Our style guide

-We've made our [style guide](https://github.com/dbt-labs/corp/blob/master/dbt_style_guide.md) public – these can act as a good starting point for your own style guide.
+We've made our [style guide](https://github.com/dbt-labs/corp/blob/main/dbt_style_guide.md) public – it can act as a good starting point for your own style guide.

:::

@@ -38,7 +38,7 @@ Your dbt project will depend on raw data stored in your database. Since this dat

:::info Using sources for raw data references

-As of v0.13.0, we recommend defining your raw data as [sources](using-sources), and selecting from the source rather than using the direct relation reference. Our dbt projects no longer contain any direct relation references in any models.
+As of v0.13.0, we recommend defining your raw data as [sources](/docs/build/sources), and selecting from the source rather than using the direct relation reference. Our dbt projects no longer contain any direct relation references in any models.

:::

@@ -81,13 +81,14 @@ dbt provides a framework to test assumptions about the results generated by a mo

:::info Recommended tests

-Our [style guide](https://github.com/dbt-labs/corp/blob/master/dbt_style_guide.md) recommends that at a minimum, every model should have a primary key that is tested to ensure it is unique, and not null.
+Our [style guide](https://github.com/dbt-labs/corp/blob/main/dbt_style_guide.md) recommends that at a minimum, every model should have a primary key that is tested to ensure it is unique, and not null.

:::

### Consider the information architecture of your data warehouse

-When a user connects to a data warehouse via a SQL client, they often rely on the names of schemas, relations, and columns, to understand the data they are presented with. To improve the information architecture of a data warehouse, we:
-* Use [custom schemas](using-custom-schemas) to separate relations into logical groupings, or hide intermediate models in a separate schema. Generally, these custom schemas align with the directories we use to group our models, and are configured from the `dbt_project.yml` file.
+When a user connects to a data warehouse via a SQL client, they often rely on the names of schemas, relations, and columns, to understand the data they are presented with. To improve the information architecture of a data warehouse, we:
+
+* Use [custom schemas](/docs/build/custom-schemas) to separate relations into logical groupings, or hide intermediate models in a separate schema. Generally, these custom schemas align with the directories we use to group our models, and are configured from the `dbt_project.yml` file.
* Use prefixes in names (for example, `stg_`, `fct_` and `dim_`) to indicate which relations should be queried by end users.

### Choose your materializations wisely

@@ -107,7 +108,7 @@ We often:

When developing, it often makes sense to only run the model you are actively working on and any downstream models. You can choose which models to run by using the [model selection syntax](node-selection/syntax).

### Run only modified models to test changes ("slim CI")
-To merge code changes with confidence, you want to know that those changes will not cause breakages elsewhere in your project. For that reason, we recommend running models and tests in a sandboxed environment, separated from your production data, as an automatic check in your git workflow. (If you use GitHub and dbt Cloud, read about [how to set up CI jobs](cloud-enabling-continuous-integration-with-github).)
+To merge code changes with confidence, you want to know that those changes will not cause breakages elsewhere in your project. For that reason, we recommend running models and tests in a sandboxed environment, separated from your production data, as an automatic check in your git workflow. (If you use GitHub and dbt Cloud, read about [how to set up CI jobs](/docs/deploy/cloud-ci-job).)

At the same time, it costs time (and money) to run and test all the models in your project. This inefficiency feels especially painful if your PR only proposes changes to a handful of models.
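+
+For example, with artifacts from a previous production run available for comparison, a slim CI job can build only the changed models and everything downstream of them — a sketch, with the artifact path as an assumption:
+
+```shell
+dbt build --select state:modified+ --state path/to/prod-artifacts
+```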
diff --git a/website/docs/docs/guides/building-packages.md b/website/docs/guides/legacy/building-packages.md
similarity index 87%
rename from website/docs/docs/guides/building-packages.md
rename to website/docs/guides/legacy/building-packages.md
index 121e863093a..55f9821852e 100644
--- a/website/docs/docs/guides/building-packages.md
+++ b/website/docs/guides/legacy/building-packages.md
@@ -5,7 +5,7 @@ id: "building-packages"

## Assumed knowledge

This article assumes you are familiar with:
-- [packages](package-management)
+- [packages](/docs/build/packages)
- administering a repository on GitHub
- [semantic versioning](https://semver.org/)

@@ -41,7 +41,7 @@ When working on your package, we often find it useful to install a local copy of

### Follow our best practices
_Modeling packages only_

-Use our [dbt coding conventions](https://github.com/dbt-labs/corp/blob/master/dbt_style_guide.md), our article on [how we structure our dbt projects](https://discourse.getdbt.com/t/how-we-structure-our-dbt-projects/355), and our [best practices](best-practices) for all of our advice on how to build your dbt project.
+Use our [dbt coding conventions](https://github.com/dbt-labs/corp/blob/main/dbt_style_guide.md), our article on [how we structure our dbt projects](https://discourse.getdbt.com/t/how-we-structure-our-dbt-projects/355), and our [best practices](best-practices) for all of our advice on how to build your dbt project.

This is where it comes in especially handy to have worked on your own dbt project previously.

@@ -50,7 +50,7 @@ _Modeling packages only_

Not every user of your package is going to store their Mailchimp data in a schema named `mailchimp`. As such, you'll need to make the location of raw data configurable.

-We recommend using [sources](using-sources) and [variables](using-variables) to achieve this. Check out [this package](https://github.com/fivetran/dbt_facebook_ads_source/blob/master/models/src_facebook_ads.yml#L5-L6) for an example — notably, the README [includes instructions](https://github.com/fivetran/dbt_facebook_ads_source#configuration) on how to override the default schema from a `dbt_project.yml` file.
+We recommend using [sources](/docs/build/sources) and [variables](/docs/build/project-variables) to achieve this. Check out [this package](https://github.com/fivetran/dbt_facebook_ads_source/blob/main/models/src_facebook_ads.yml#L5-L6) for an example — notably, the README [includes instructions](https://github.com/fivetran/dbt_facebook_ads_source#configuration) on how to override the default schema from a `dbt_project.yml` file.

### Install upstream packages from hub.getdbt.com

@@ -75,10 +75,10 @@ Many SQL functions are specific to a particular database. For example, the funct

If you wish to support multiple warehouses, we have a number of tricks up our sleeve:
- We've written a number of macros that compile to valid SQL snippets on each of the original four adapters. Where possible, leverage these macros.
- If you need to implement cross-database compatibility for one of your macros, use the [`adapter.dispatch` macro](dispatch) to achieve this. Check out the cross-database macros in dbt-utils for examples.
-- If you're working on a modeling package, you may notice that you need write different models for each warehouse (for example, if the EL tool you are working with stores data differently on each warehouse).
In this case, you can write different versions of each model, and use the [`enabled` config](enabled), in combination with [`target.type`](dbt-jinja-functions/target) to enable the correct models — check out [this package](https://github.com/fivetran/dbt_facebook_ads_creative_history/blob/master/dbt_project.yml#L11-L16) as an example. +- If you're working on a modeling package, you may notice that you need to write different models for each warehouse (for example, if the EL tool you are working with stores data differently on each warehouse). In this case, you can write different versions of each model, and use the [`enabled` config](enabled), in combination with [`target.type`](/reference/dbt-jinja-functions/target) to enable the correct models — check out [this package](https://github.com/fivetran/dbt_facebook_ads_creative_history/blob/main/dbt_project.yml#L11-L16) as an example. + -If your package has only been written to work for one data warehouse, make sure you document this in your package README. +If your package has only been written to work for one data warehouse, make sure you document this in your package README. ### Use specific model names _Modeling packages only_ @@ -92,7 +92,7 @@ dbt makes it possible for users of your package to override your model -Finally, it's frequently desirable to only update a date partitioned table for the last day of received data. This can be implemented using the above configurations in conjunction with a clever macro and some [command line variables](using-variables). +Finally, it's frequently desirable to only update a date partitioned table for the last day of received data. This can be implemented using the above configurations in conjunction with a clever macro and some [command line variables](/docs/build/project-variables). First, the macro: diff --git a/website/docs/docs/guides/creating-new-materializations.md b/website/docs/guides/legacy/creating-new-materializations.md similarity index 91% rename from website/docs/docs/guides/creating-new-materializations.md rename to website/docs/guides/legacy/creating-new-materializations.md index 4428c92fd37..05cf790a297 100644 --- a/website/docs/docs/guides/creating-new-materializations.md +++ b/website/docs/guides/legacy/creating-new-materializations.md @@ -89,7 +89,7 @@ Pre- and post-hooks can be specified for any model -- be sure that your material ``` ### Executing SQL -Construct your materialization to account for the different permutations of existence, materialization flags, etc. There are a number of [adapter functions](dbt-jinja-functions/adapter) and context variables that can help you here. Be sure to consult the Reference section of this site for a full list of variables and functions at your disposal. +Construct your materialization to account for the different permutations of existence, materialization flags, etc. There are a number of [adapter functions](/reference/dbt-jinja-functions/adapter) and context variables that can help you here. Be sure to consult the Reference section of this site for a full list of variables and functions at your disposal. ### Run post-hooks @@ -148,11 +148,11 @@ Materializations should [return](return) the list of Relations that they have cr If a materialization solely creates a single relation, then returning that relation at the end of the materialization is sufficient to synchronize the dbt Relation cache.
If the materialization *renames* or *drops* Relations other than the relation returned by the materialization, then additional work is required to keep the cache in sync with the database. -To explicitly remove a relation from the cache, use [adapter.drop_relation](dbt-jinja-functions/adapter). To explicitly rename a relation in the cache, use [adapter.rename_relation](dbt-jinja-functions/adapter). Calling these methods is preferable to executing the corresponding SQL directly, as they will mutate the cache as required. If you do need to execute the SQL to drop or rename relations directly, use the `adapter.cache_dropped` and `adapter.cache_renamed` methods to synchronize the cache. +To explicitly remove a relation from the cache, use [adapter.drop_relation](/reference/dbt-jinja-functions/adapter). To explicitly rename a relation in the cache, use [adapter.rename_relation](/reference/dbt-jinja-functions/adapter). Calling these methods is preferable to executing the corresponding SQL directly, as they will mutate the cache as required. If you do need to execute the SQL to drop or rename relations directly, use the `adapter.cache_dropped` and `adapter.cache_renamed` methods to synchronize the cache. ## Materialization Configuration -Materializations support custom configuration. You might be familiar with some of these configs from materializations like `unique_key` in [incremental models](configuring-incremental-models) or `strategy` in [snapshots](snapshots). +Materializations support custom configuration. You might be familiar with some of these configs from materializations like `unique_key` in [incremental models](/docs/build/incremental-models) or `strategy` in [snapshots](snapshots). ### Specifying configuration options @@ -165,7 +165,7 @@ config.get('optional_config_name', default="the default") config.require('required_config_name') ``` -For more information on the `config` dbt Jinja function, see the [config](dbt-jinja-functions/config) reference. +For more information on the `config` dbt Jinja function, see the [config](/reference/dbt-jinja-functions/config) reference. ## Materialization precedence diff --git a/website/docs/docs/guides/custom-generic-tests.md b/website/docs/guides/legacy/custom-generic-tests.md similarity index 94% rename from website/docs/docs/guides/custom-generic-tests.md rename to website/docs/guides/legacy/custom-generic-tests.md index 4809669bf9b..601e80a1254 100644 --- a/website/docs/docs/guides/custom-generic-tests.md +++ b/website/docs/guides/legacy/custom-generic-tests.md @@ -20,7 +20,7 @@ There are tons of generic tests defined in open source packages, such as [dbt-ut Generic tests are defined in SQL files. Those files can live in two places: - `tests/generic/`: that is, a special subfolder named `generic` within your [test paths](test-paths) (`tests/` by default) -- `macros/`: Why? Generic tests work work a lot like macros, and historically, this was the only place they could be defined. If your generic test depends on complex macro logic, you may find it more convenient to define the macros and the generic test in the same file. +- `macros/`: Why? Generic tests work a lot like macros, and historically, this was the only place they could be defined. If your generic test depends on complex macro logic, you may find it more convenient to define the macros and the generic test in the same file. To define your own generic tests, simply create a `test` block called `<test_name>`.
All generic tests should accept one or both of the standard arguments: - `model`: The resource on which the test is defined, templated out to its relation name. (Note that the argument is always named `model`, even when the resource is a source, seed, or snapshot.) @@ -143,7 +143,7 @@ models: It is possible to include a `config()` block in a generic test definition. Values set there will set defaults for all specific instances of that generic test, unless overridden within the specific instance's `.yml` properties. - + ```sql {% test warn_if_odd(model, column_name) %} @@ -184,7 +184,7 @@ models: To change the way a built-in generic test works—whether to add additional parameters, re-write the SQL, or for any other reason—you simply add a test block named `<test_name>` to your own project. dbt will favor your version over the global implementation! - + ```sql {% test unique(model, column_name) %} diff --git a/website/docs/docs/guides/debugging-errors.md b/website/docs/guides/legacy/debugging-errors.md similarity index 96% rename from website/docs/docs/guides/debugging-errors.md rename to website/docs/guides/legacy/debugging-errors.md index 4465c96a283..254f06b69e9 100644 --- a/website/docs/docs/guides/debugging-errors.md +++ b/website/docs/guides/legacy/debugging-errors.md @@ -11,7 +11,7 @@ Learning how to debug is a skill, and one that will make you great at your role! - The `target/run` directory contains the SQL dbt executes to build your models. - The `logs/dbt.log` file contains all the queries that dbt runs, and additional logging. Recent errors will be at the bottom of the file. - **dbt Cloud users**: Use the above, or the `Details` tab in the command output. - - **dbt CLI users**: Note that your code editor _may_ be hiding these files from the tree ([Atom help](https://discuss.atom.io/t/all-gitignored-files-are-hidden-now-atom-1-15/39238), [VSCode help](https://stackoverflow.com/questions/42891463/how-can-i-show-ignored-files-in-visual-studio-code)). + - **dbt CLI users**: Note that your code editor _may_ be hiding these files from the tree ([VSCode help](https://stackoverflow.com/questions/42891463/how-can-i-show-ignored-files-in-visual-studio-code)). 5. If you are really stuck, try [asking for help](getting-help). Before doing so, take the time to write your question well so that others can diagnose the problem quickly. @@ -315,11 +315,11 @@ Found a cycle: model.jaffle_shop.customers --> model.jaffle_shop.stg_customers - Your dbt DAG is not acyclic, and needs to be fixed! - Update the `ref` functions to break the cycle. -- If you need to reference the current model, use the [`{{ this }}` variable](dbt-jinja-functions/this) instead. +- If you need to reference the current model, use the [`{{ this }}` variable](/reference/dbt-jinja-functions/this) instead. ## Database Errors -The thorniest errors of all! These errors come from your data warehouse, and dbt passes the message on. You may need to use your warehouse docs (i.e. the Snowflake docs, or BigQuery docs) to debug these. +The thorniest errors of all! These errors come from your data warehouse, and dbt passes the message on. You may need to use your warehouse docs (e.g. the Snowflake docs, or BigQuery docs) to debug these.
``` $ dbt run diff --git a/website/docs/docs/guides/debugging-schema-names.md b/website/docs/guides/legacy/debugging-schema-names.md similarity index 95% rename from website/docs/docs/guides/debugging-schema-names.md rename to website/docs/guides/legacy/debugging-schema-names.md index c902a0383fd..3a786afe5b7 100644 --- a/website/docs/docs/guides/debugging-schema-names.md +++ b/website/docs/guides/legacy/debugging-schema-names.md @@ -4,8 +4,8 @@ title: Debugging schema names If a model uses the [`schema` config](resource-configs/schema) but builds under an unexpected schema, here are some steps for debugging the issue. -:::info -The full explanation on custom schemas can be found [here](using-custom-schemas). +:::info +The full explanation on custom schemas can be found [here](/docs/build/custom-schemas). ::: You can also follow along via this video: @@ -16,7 +16,7 @@ Do a file search to check if you have a macro named `generate_schema_name` in the `macros` directory of your project. #### I do not have a macro named `generate_schema_name` in my project -This means that you are using dbt's default implementation of the macro, as defined [here](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/etc/get_custom_schema.sql#L17-L30) +This means that you are using dbt's default implementation of the macro, as defined [here](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/include/global_project/macros/get_custom_name/get_custom_schema.sql#L17-L30) ```sql {% macro generate_schema_name(custom_schema_name, node) -%} diff --git a/website/docs/docs/guides/getting-help.md b/website/docs/guides/legacy/getting-help.md similarity index 92% rename from website/docs/docs/guides/getting-help.md rename to website/docs/guides/legacy/getting-help.md index 1f3e06a535b..7be3a6a9c1b 100644 --- a/website/docs/docs/guides/getting-help.md +++ b/website/docs/guides/legacy/getting-help.md @@ -6,9 +6,8 @@ dbt is open source, and has a generous community behind it. Asking questions wel ### 1. Try to solve your problem first before asking for help -#### Check out the existing documentation -We invest heavily in our docs, so this should be the first place you turn! If you're a new dbt user, spend some time completing the [Getting Started tutorial](/tutorial/getting-started) to get familiar with dbt. - +#### Search the existing documentation +The docs site you're on is highly searchable; make sure to explore it for the answer as a first step. If you're new to dbt, try working through the [Getting Started guide](/docs/get-started/getting-started/overview) first to get a firm foundation on the essential concepts. #### Try to debug the issue yourself We have a handy guide on [debugging errors](debugging-errors) to help out! This guide also helps explain why errors occur, and which docs you might need to search for help. @@ -40,7 +39,7 @@ Sometimes you might hit a boundary of dbt because you're trying to use it in a w ### 3. Choose the right medium for your question We use a number of different mediums to share information -- If your question is roughly "I've hit this error and am stuck", please ask it on [Stack Overflow](https://stackoverflow.com/questions/ask?tags=dbt). +- If your question is roughly "I've hit this error and am stuck", please ask it on [the dbt Community Forum](https://discourse.getdbt.com). - If you think you've found a bug, please report it on the relevant GitHub repo (e.g.
[dbt repo](https://github.com/dbt-labs/dbt), [dbt-utils repo](https://github.com/dbt-labs/dbt-utils)) - If you are looking for an opinionated answer (e.g. "What's the best approach to X?", "Why is Y done this way?"), then, feel free to join our [Slack community](https://community.getdbt.com/) and ask it in the correct channel: * **#advice-dbt-for-beginners:** A great channel if you're getting started with dbt and want to understand how it works. diff --git a/website/docs/docs/guides/understanding-state.md b/website/docs/guides/legacy/understanding-state.md similarity index 100% rename from website/docs/docs/guides/understanding-state.md rename to website/docs/guides/legacy/understanding-state.md diff --git a/website/docs/docs/guides/videos.md b/website/docs/guides/legacy/videos.md similarity index 100% rename from website/docs/docs/guides/videos.md rename to website/docs/guides/legacy/videos.md diff --git a/website/docs/guides/migration/tools/migrating-from-spark-to-databricks.md b/website/docs/guides/migration/tools/migrating-from-spark-to-databricks.md new file mode 100644 index 00000000000..1a7d41600ba --- /dev/null +++ b/website/docs/guides/migration/tools/migrating-from-spark-to-databricks.md @@ -0,0 +1,111 @@ +--- +title: "Migrating from dbt-spark to dbt-databricks" +id: "migrating-from-spark-to-databricks" +--- + + +## Prerequisites + +In order to migrate to dbt-databricks, your project must be compatible with `dbt 1.0` or greater, as dbt-databricks is not supported before `dbt 1.0`. [This guide](https://docs.getdbt.com/guides/migration/versions/upgrading-to-v1.0) will help you upgrade your project if necessary. + +## Why change to dbt-databricks? + +The Databricks team, in collaboration with dbt Labs, built on top of the foundation that the dbt Labs’ dbt-spark adapter provided, and they added some critical improvements. The dbt-databricks adapter offers an easier setup, as it only requires three inputs for authentication, and it also has more features available via the Delta file format. + +### Authentication Simplification + +Previously, users had to provide a `cluster` or `endpoint` ID, which was hard to parse out of the `http_path` provided in the Databricks UI. Now the [dbt-databricks profile](https://docs.getdbt.com/reference/warehouse-setups/databricks-setup) requires the same inputs regardless of whether you are using a cluster or a SQL endpoint. All you need to provide is: +- the hostname of the Databricks workspace +- the HTTP path of the Databricks SQL warehouse or cluster +- an appropriate credential + + +### Better defaults + +With dbt-databricks, by default, dbt models will use the Delta format and expensive queries will be accelerated with the [Photon engine](https://docs.databricks.com/runtime/photon.html). See [the caveats section of Databricks Profile documentation](https://docs.getdbt.com/reference/warehouse-profiles/databricks-profile#choosing-between-dbt-databricks-and-dbt-spark) for more information. Any declared configurations of `file_format = 'delta'` are now redundant and can be removed. + +Additionally, dbt-databricks's default `incremental_strategy` is now `merge`. The default `incremental_strategy` with dbt-spark is `append`. +If you have been using the default `incremental_strategy=append` with dbt-spark, and would like to continue doing so, you'll have to set this config specifically on your incremental models.
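+
+For example, a minimal sketch of pinning the old behavior (the model and column names here are hypothetical):
+
+```sql
+-- models/my_append_model.sql (hypothetical)
+{{
+    config(
+        materialized='incremental',
+        incremental_strategy='append'
+    )
+}}
+
+select * from {{ ref('events') }}
+
+{% if is_incremental() %}
+-- on incremental runs, only load rows that arrived since the last run
+where event_time > (select max(event_time) from {{ this }})
+{% endif %}
+```
+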
Read more [about `incremental_strategy` in dbt](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/configuring-incremental-models#about-incremental_strategy). +If you already specified `incremental_strategy=merge` on your incremental models, you do not need to change anything when moving to dbt-databricks, though you could remove the param as it is now the default. + +### Pure Python (Core only) + +A huge benefit to Core only users is that with the new dbt-databricks adapter, you no longer have to download an independent driver to interact with Databricks. The connection information is all embedded in a pure-Python library, `databricks-sql-connector`. + + +## Migration +### dbt Cloud + +#### Credentials +If you are already successfully connected to Databricks using the dbt-spark ODBC method in dbt Cloud, then you have already supplied credentials in dbt Cloud to connect to your Databricks workspace. Each user will have added their Personal Access Token in their dbt Cloud profile for the given dbt project, which allows them to connect to Databricks in the dbt Cloud IDE, and additionally, an admin will have added an access token for each deployment environment, allowing for dbt Cloud to connect to Databricks during production jobs. + +When an admin changes the dbt Cloud connection to use the dbt-databricks adapter instead of the dbt-spark adapter, your team will not lose their credentials. This makes migrating from dbt-spark to dbt-databricks straightforward, as it only requires deleting the connection and re-adding the cluster/endpoint information. Neither the admin nor the users of the project need to re-enter personal access tokens. + +#### Procedure + +An admin of the dbt Cloud project running on Databricks should take the following steps to migrate from using the generic Spark adapter to the Databricks-specific adapter. This should not cause any downtime for production jobs, but we recommend that you schedule the connection change when there is not heavy IDE usage for your team to avoid disruption. + +1. Select **Account Settings** in the main navigation bar. +2. On the Projects tab, scroll until you find the project you'd like to migrate to the new dbt-databricks adapter. +3. Click the hyperlinked Connection for the project. +4. Click the "Edit" button in the top right corner. +5. Select Databricks for the warehouse. +6. Select Databricks (dbt-databricks) for the adapter and enter: + 1. the `hostname` + 2. the `http_path` + 3. optionally the catalog name +7. Click **Save**. + +After the above steps have been performed, all users will have to refresh their IDE before being able to start working again. It should complete in less than a minute. + + + + + +### dbt Core + +In dbt Core, migrating to the dbt-databricks adapter from dbt-spark requires that you: +1. install the new adapter in your environment, and +2. modify your target in your `~/.dbt/profiles.yml` + +These changes will be needed for all users of your project. + +#### Example + +If you're using `dbt-spark` today to connect to a Databricks SQL endpoint, the examples below show a before and after of how to authenticate. The cluster case looks effectively the same.
+ + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: spark + method: odbc + driver: '/opt/simba/spark/lib/64/libsparkodbc_sb64.so' + schema: my_schema + host: dbc-l33t-nwb.cloud.databricks.com + endpoint: 8657cad335ae63e3 + token: [my_secret_token] + +``` + + + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: databricks + schema: my_schema + host: dbc-l33t-nwb.cloud.databricks.com + http_path: /sql/1.0/endpoints/8657cad335ae63e3 + token: [my_secret_token] +``` + + \ No newline at end of file diff --git a/website/docs/guides/migration/tools/migrating-from-stored-procedures/1-migrating-from-stored-procedures.md b/website/docs/guides/migration/tools/migrating-from-stored-procedures/1-migrating-from-stored-procedures.md new file mode 100644 index 00000000000..aae8b373b2c --- /dev/null +++ b/website/docs/guides/migration/tools/migrating-from-stored-procedures/1-migrating-from-stored-procedures.md @@ -0,0 +1,27 @@ +--- +title: Migrating from DDL, DML, and stored procedures +id: 1-migrating-from-stored-procedures +--- + +One of the more common situations that new dbt adopters encounter is a historical codebase of transformations written as a hodgepodge of DDL and DML statements, or stored procedures. Going from DML statements to dbt models is often a challenging hump for new users to get over, because the process involves a significant paradigm shift between a procedural flow of building a dataset (e.g. a series of DDL and DML statements) and a declarative approach to defining a dataset (e.g. how dbt uses SELECT statements to express data models). This guide aims to provide tips, tricks, and common patterns for converting DML statements to dbt models. + +## Preparing to migrate + +Before getting into the meat of conversion, it’s worth noting that DML statements will not always illustrate a comprehensive set of columns and column types that an original table might contain. Without knowing the DDL to create the table, it’s impossible to know precisely if your conversion effort is apples-to-apples, but you can generally get close. + +If your data warehouse supports `SHOW CREATE TABLE`, that can be a quick way to get a comprehensive set of columns you’ll want to recreate. If you don’t have the DDL, but are working on a substantial stored procedure, one approach that can work is to pull column lists out of any DML statements that modify the table, and build up a full set of the columns that appear. + +As for ensuring that you have the right column types, since models materialized by dbt generally use `CREATE TABLE AS SELECT` or `CREATE VIEW AS SELECT` as the driver for object creation, tables can end up with unintended column types if the queries aren’t explicit. For example, if you care about `INT` versus `DECIMAL` versus `NUMERIC`, it’s generally going to be best to be explicit. The good news is that this is easy with dbt: you just cast the column to the type you intend. + +We also generally recommend that column renaming and type casting happen as close to the source tables as possible, typically in a layer of staging transformations, which helps ensure that future dbt modelers will know where to look for those transformations! See [How we structure our dbt projects](/guides/best-practices/how-we-structure/1-guide-overview) for more guidance on overall project structure.
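+
+As a short sketch of that advice (the table and column names here are hypothetical), a staging model might make the intended types explicit like so:
+
+```sql
+-- models/staging/stg_payments.sql (hypothetical)
+select
+    payment_id,
+    order_id,
+    -- cast explicitly so the table dbt creates gets the intended types
+    cast(amount as numeric(18, 2)) as amount,
+    cast(created_at as timestamp) as created_at
+
+from {{ source('app', 'payments') }}
+```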
+ +### Operations we need to map + +There are four primary DML statements that you are likely to have to convert to dbt operations while migrating a procedure: + +- `INSERT` +- `UPDATE` +- `DELETE` +- `MERGE` + +Each of these can be addressed using various techniques in dbt. Handling `MERGE`s is a bit more involved than the rest, but can be handled effectively via dbt. The first three, however, are fairly simple to convert. diff --git a/website/docs/guides/migration/tools/migrating-from-stored-procedures/2-mapping-inserts.md b/website/docs/guides/migration/tools/migrating-from-stored-procedures/2-mapping-inserts.md new file mode 100644 index 00000000000..6a5ba57f1a8 --- /dev/null +++ b/website/docs/guides/migration/tools/migrating-from-stored-procedures/2-mapping-inserts.md @@ -0,0 +1,57 @@ +--- +title: Inserts +id: 2-inserts +--- + +An `INSERT` statement is functionally the same as using dbt to `SELECT` from an existing source or other dbt model. If you are faced with an `INSERT`-`SELECT` statement, the easiest way to convert the statement is to just create a new dbt model, and pull the `SELECT` portion of the `INSERT` statement out of the procedure and into the model. That’s basically it! + +To really break it down, let’s consider a simple example: + +```sql +INSERT INTO returned_orders (order_id, order_date, total_return) + +SELECT order_id, order_date, total FROM orders WHERE type = 'return' +``` + +Converting this with a first pass to a [dbt model](/docs/get-started/getting-started/building-your-first-project/build-your-first-models) (in a file called `returned_orders.sql`) might look something like: + +```sql +SELECT + order_id as order_id, + order_date as order_date, + total as total_return + +FROM {{ ref('orders') }} + +WHERE type = 'return' +``` + +Functionally, this would create a model (which could be materialized as a table or view depending on needs) called `returned_orders` that contains three columns (`order_id`, `order_date`, `total_return`), predicated on the `type` column. It achieves the same end as the `INSERT`, just in a declarative fashion, using dbt. + +## **A note on `FROM` clauses** + +In dbt, using a hard-coded table or view name in a `FROM` clause is one of the most serious mistakes new users make. dbt uses the `ref` and `source` macros to discover the ordering that transformations need to execute in, and if you don’t use them, you’ll be unable to benefit from dbt’s built-in lineage generation and pipeline execution. In the sample code throughout the remainder of this article, we’ll use `ref` statements in the dbt-converted versions of SQL statements, but it is an exercise for the reader to ensure that those models exist in their dbt projects. + +## **Sequential `INSERT`s to an existing table can be `UNION ALL`’ed together** + +Since dbt models effectively perform a single `CREATE TABLE AS SELECT` (or if you break it down into steps, `CREATE`, then an `INSERT`), you may run into complexities if there are multiple `INSERT` statements in your transformation that all insert data into the same table. Fortunately, this is a simple thing to handle in dbt. Effectively, the logic is performing a `UNION ALL` between the `INSERT` queries.
If I have a transformation flow that looks something like (ignore the contrived nature of the scenario): + +```sql +CREATE TABLE all_customers + +INSERT INTO all_customers SELECT * FROM us_customers + +INSERT INTO all_customers SELECT * FROM eu_customers +``` + +The dbt-ified version of this would end up looking something like: + +```sql +SELECT * FROM {{ ref('us_customers') }} + +UNION ALL + +SELECT * FROM {{ ref('eu_customers') }} +``` + +The logic is functionally equivalent. So if there’s another statement that `INSERT`s into a model that I’ve already created, I can just add that logic into a second `SELECT` statement that is `UNION ALL`'ed with the first. Easy! diff --git a/website/docs/guides/migration/tools/migrating-from-stored-procedures/3-mapping-updates.md b/website/docs/guides/migration/tools/migrating-from-stored-procedures/3-mapping-updates.md new file mode 100644 index 00000000000..b6f0874fb6b --- /dev/null +++ b/website/docs/guides/migration/tools/migrating-from-stored-procedures/3-mapping-updates.md @@ -0,0 +1,55 @@ +--- +title: Updates +id: 3-updates +--- + +`UPDATE`s start to increase the complexity of your transformations, but fortunately, they’re pretty darn simple to migrate, as well. The thought process that you go through when translating an `UPDATE` is quite similar to how an `INSERT` works, but the logic for the `SELECT` list in the dbt model is primarily sourced from the content in the `SET` section of the `UPDATE` statement. Let’s look at a simple example: + +```sql +UPDATE orders + +SET type = 'return' + +WHERE total < 0 +``` + +The way to look at this is similar to an `INSERT`-`SELECT` statement. The table being updated is the model you want to modify, and since this is an `UPDATE`, that model has likely already been created, and you can either: + +- add to it with subsequent transformations +- create an intermediate model that builds off of the original model – perhaps naming it something like `int_[entity]_[verb].sql`. + +The `SELECT` list should contain all of the columns for the table, but for the specific columns being updated by the DML, you’ll use the computation on the right side of the equals sign as the `SELECT`ed value. Then, you can use the target column name on the left of the equals sign as the column alias. + +If I were building an intermediate transformation, the above query would translate to something along the lines of: + +```sql +SELECT + CASE + WHEN total < 0 THEN 'return' + ELSE type + END AS type, + + order_id, + order_date + +FROM {{ ref('stg_orders') }} +``` + +Since the `UPDATE` statement doesn’t modify every value of the `type` column, we use a `CASE` statement to apply the `UPDATE`’s `WHERE` clause. We still want to select all of the columns that should end up in the target table. If we left one of the columns out, it wouldn’t be passed through to the target table at all due to dbt’s declarative approach. + +Sometimes, you may not be sure what all the columns are in a table, or, as in the situation above, you’re only modifying a small number of columns relative to the total number of columns in the table. It can be cumbersome to list out every column in the table, but fortunately there are some useful utility macros that can help list out the full column list of a table.
+ +Another way I could have written the model a bit more dynamically might be: + +```sql +SELECT + {{ dbt_utils.star(from=ref('stg_orders'), except=['type']) }}, + CASE + WHEN total < 0 THEN 'return' + ELSE type + END AS type + +FROM {{ ref('stg_orders') }} +``` + +The `dbt_utils.star()` macro will print out the full list of columns in the table, but skip the ones I’ve listed in the except list, which allows me to perform the same logic while writing fewer lines of code. This is a simple example of using dbt macros to simplify and shorten your code, and dbt can get a lot more sophisticated as you learn more techniques. Read more about the [dbt_utils package](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/) and the [star macro](https://github.com/dbt-labs/dbt-utils/tree/0.8.6/#star-source). diff --git a/website/docs/guides/migration/tools/migrating-from-stored-procedures/4-mapping-deletes.md b/website/docs/guides/migration/tools/migrating-from-stored-procedures/4-mapping-deletes.md new file mode 100644 index 00000000000..f7a6542acc7 --- /dev/null +++ b/website/docs/guides/migration/tools/migrating-from-stored-procedures/4-mapping-deletes.md @@ -0,0 +1,45 @@ +--- +title: Deletes +id: 4-deletes +--- + +One of the biggest differences between a procedural transformation and how dbt models data is that dbt, in general, will never destroy data. While there are ways to execute hard `DELETE`s in dbt that are outside of the scope of this article, the general best practice for handling deleted data is to just use soft deletes, and filter out soft-deleted data in a final transformation. + +Let’s consider a simple example query: + +```sql +DELETE FROM stg_orders WHERE order_status IS NULL +``` + +In a dbt model, you’ll need to first identify the records that should be deleted and then filter them out. There are really two primary ways you might translate this query: + +```sql +SELECT * FROM {{ ref('stg_orders') }} WHERE order_status IS NOT NULL +``` + +This first approach just inverts the logic of the DELETE to describe the set of records that should remain, instead of the set of records that should be removed. This ties back to the way dbt declaratively describes datasets. You reference the data that should be in a dataset, and the table or view gets created with that set of data. + +Another way you could achieve this is by marking the deleted records, and then filtering them out. For example: + +```sql +WITH + +soft_deletes AS ( + + SELECT + *, + CASE + WHEN order_status IS NULL THEN true + ELSE false + END AS to_delete + + FROM {{ ref('stg_orders') }} + +) + +SELECT * FROM soft_deletes WHERE to_delete = false +``` + +This approach flags all of the deleted records, and the final `SELECT` filters out any deleted data, so the resulting table contains only the remaining records. It’s a lot more verbose than just inverting the `DELETE` logic, but for complex `DELETE` logic, this ends up being a very effective way of performing the `DELETE` that retains historical context. + +It’s worth calling out that while this doesn’t enable a hard delete, hard deletes can be executed a number of ways, the most common being to execute a dbt [macro](https://docs.getdbt.com/docs/building-a-dbt-project/jinja-macros/#macros) as a [run-operation](https://docs.getdbt.com/reference/commands/run-operation), or by using a [post-hook](https://docs.getdbt.com/reference/resource-configs/pre-hook-post-hook/) to perform a `DELETE` statement after the records to-be-deleted have been marked.
These are advanced approaches outside the scope of this guide. diff --git a/website/docs/guides/migration/tools/migrating-from-stored-procedures/5-mapping-merges.md b/website/docs/guides/migration/tools/migrating-from-stored-procedures/5-mapping-merges.md new file mode 100644 index 00000000000..3a2c932c404 --- /dev/null +++ b/website/docs/guides/migration/tools/migrating-from-stored-procedures/5-mapping-merges.md @@ -0,0 +1,184 @@ +--- +title: Merges +id: 5-merges +--- + +dbt has a concept called [materialization](/docs/build/materializations), which determines how a model is physically or logically represented in the warehouse. `INSERT`s, `UPDATE`s, and `DELETE`s will typically be accomplished using table or view materializations. For incremental workloads accomplished via commands like `MERGE` or `UPSERT`, dbt has a particular materialization called [incremental](/docs/build/incremental-models). The incremental materialization is specifically used to handle incremental loads and updates to a table without recreating the entire table from scratch on every run. + +## Step 1: Map the MERGE like an INSERT/UPDATE to start + +Before we get into the exact details of how to implement an incremental materialization, let’s talk about logic conversion. Extracting the logic of the `MERGE` and handling it as you would an `INSERT` or an `UPDATE` is the easiest way to get started migrating a `MERGE` command. + +To see how the logic conversion works, we’ll start with an example `MERGE`. In this scenario, imagine a ride-sharing app where rides are loaded into a details table daily, and tips may be updated at some later date, and need to be kept up-to-date: + +```sql +MERGE INTO ride_details USING ( + SELECT + ride_id, + subtotal, + tip + + FROM rides_to_load + +) AS rtl + + ON ride_details.ride_id = rtl.ride_id + + WHEN MATCHED THEN UPDATE + + SET ride_details.tip = rtl.tip + + WHEN NOT MATCHED THEN INSERT (ride_id, subtotal, tip) + VALUES (rtl.ride_id, rtl.subtotal, NVL(rtl.tip, 0) +); +``` + +The content of the `USING` clause is a useful piece of code because that can easily be placed in a CTE as a starting point for handling the match statement. I find that the easiest way to break this apart is to treat each match statement as a separate CTE that builds on the previous match statements. + +We can ignore the `ON` clause for now, as that will only come into play once we get to a point where we’re ready to turn this into an incremental. + +As with `UPDATE`s and `INSERT`s, you can use the `SELECT` list and aliases to name columns appropriately for the target table, and `UNION` together `INSERT` statements (taking care to use `UNION`, rather than `UNION ALL`, to avoid duplicates). + +The `MERGE` would end up translating to something like this: + +```sql +WITH + +using_clause AS ( + + SELECT + ride_id, + subtotal, + tip + + FROM {{ ref('rides_to_load') }} + +), + +updates AS ( + + SELECT + ride_id, + subtotal, + tip + + FROM using_clause + +), + +inserts AS ( + + SELECT + ride_id, + subtotal, + NVL(tip, 0) AS tip + + FROM using_clause + +) + +SELECT * + +FROM updates + +UNION + +SELECT * + +FROM inserts +``` + +To be clear, this transformation isn’t complete. The logic here is similar to the `MERGE`, but will not actually do the same thing, since the `updates` and `inserts` CTEs are both selecting from the same source query. We’ll need to ensure we grab the separate sets of data as we transition to the incremental materialization.
+ +One important caveat is that dbt does not natively support `DELETE` as a `MATCH` action. If you have a line in your `MERGE` statement that uses `WHEN MATCHED THEN DELETE`, you’ll want to treat it like an update and add a soft-delete flag, which is then filtered out in a follow-on transformation. + +## Step 2: Convert to incremental materialization + +As mentioned above, incremental materializations are a little special in that when the target table does not exist, the materialization functions in nearly the same way as a standard table materialization, and executes a `CREATE TABLE AS SELECT` statement. If the target table does exist, however, the materialization instead executes a `MERGE` statement. + +Since a `MERGE` requires a `JOIN` condition between the `USING` clause and the target table, we need a way to specify how dbt determines whether or not a record triggers a match. That particular piece of information is specified in the dbt model configuration. + +We can add the following `config()` block to the top of our model to specify how it should build incrementally: + +```sql +{{ + config( + materialized='incremental', + unique_key='ride_id', + incremental_strategy='merge' + ) +}} +``` + +The three configuration fields in this example are the most important ones. + +- Setting `materialized='incremental'` tells dbt to apply UPSERT logic to the target table. +- The `unique_key` should be a primary key of the target table. This is used to match records with the existing table. +- `incremental_strategy` here is set to `merge`, which merges any existing rows in the target table whose `unique_key` matches the incoming batch of data. There are [various incremental strategies](/docs/build/incremental-models#about-incremental_strategy) for different situations and warehouses. + +The bulk of the work in converting a model to an incremental materialization comes in determining how the logic should change for incremental loads versus full backfills or initial loads. dbt offers a special macro, `is_incremental()`, which evaluates to false for initial loads or for backfills (called full refreshes in dbt parlance), but to true for incremental loads. + +This macro can be used to augment the model code to adjust how data is loaded for subsequent loads. How that logic should be added will depend a little bit on how data is received. Some common ways might be: + +1. The source table is truncated ahead of incremental loads, and only contains the data to be loaded in that increment. +2. The source table contains all historical data, and there is a load timestamp column that identifies new data to be loaded. + +In the first case, the work is essentially done already. Since the source table always contains only the new data to be loaded, the query doesn’t have to change for incremental loads. The second case, however, requires the use of the `is_incremental()` macro to correctly handle the logic.
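+
+In isolation, that conditional logic is typically just a guarded `WHERE` clause. A minimal sketch, reusing the names from the running example:
+
+```sql
+select * from {{ ref('rides_to_load') }}
+
+{% if is_incremental() %}
+-- rendered only on incremental runs; {{ this }} is the existing target table
+where load_timestamp > (select max(load_timestamp) from {{ this }})
+{% endif %}
+```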
Taking the converted `MERGE` statement that we’d put together previously, we’d augment it to add this additional logic: + +```sql +WITH + +using_clause AS ( + + SELECT + ride_id, + subtotal, + tip, + load_timestamp + + FROM {{ ref('rides_to_load') }} + + + {% if is_incremental() %} + + WHERE load_timestamp > (SELECT max(load_timestamp) FROM {{ this }}) + + {% endif %} + +), + +updates AS ( + + SELECT + ride_id, + subtotal, + tip, + load_timestamp + + FROM using_clause + + {% if is_incremental() %} + + WHERE ride_id IN (SELECT ride_id FROM {{ this }}) + + {% endif %} + +), + +inserts AS ( + + SELECT + ride_id, + subtotal, + NVL(tip, 0) AS tip, + load_timestamp + + FROM using_clause + + WHERE ride_id NOT IN (SELECT ride_id FROM updates) + +) + +SELECT * FROM updates UNION SELECT * FROM inserts +``` + +There are a couple of important concepts to understand here: + +1. The code in the `is_incremental()` conditional block only executes for incremental executions of this model code. If the target table doesn’t exist, or if the `--full-refresh` option is used, that code will not execute. +2. `{{ this }}` is a special keyword in dbt that, when used in a Jinja block, refers to the model for which the code is executing. So if you have a model in a file called `my_incremental_model.sql`, `{{ this }}` will refer to `my_incremental_model` (fully qualified with database and schema name if necessary). By using that keyword, we can leverage the current state of the target table to inform the source query. diff --git a/website/docs/guides/migration/tools/migrating-from-stored-procedures/6-migrating-from-stored-procedures-conclusion.md b/website/docs/guides/migration/tools/migrating-from-stored-procedures/6-migrating-from-stored-procedures-conclusion.md new file mode 100644 index 00000000000..6fddf15c163 --- /dev/null +++ b/website/docs/guides/migration/tools/migrating-from-stored-procedures/6-migrating-from-stored-procedures-conclusion.md @@ -0,0 +1,6 @@ +--- +title: Putting it all together +id: 6-migrating-from-stored-procedures-conclusion +--- + +The techniques shared above are useful ways to get started converting the individual DML statements that are often found in stored procedures. Using these types of patterns, legacy procedural code can be rapidly transitioned to dbt models that are much more readable, maintainable, and benefit from software engineering best practices like DRY principles. Additionally, once transformations are rewritten as dbt models, it becomes much easier to test the transformations to ensure that the data being used downstream is high-quality and trustworthy. diff --git a/website/docs/guides/migration/versions/05-upgrading-to-v1.3.md b/website/docs/guides/migration/versions/05-upgrading-to-v1.3.md new file mode 100644 index 00000000000..3d687da9f9d --- /dev/null +++ b/website/docs/guides/migration/versions/05-upgrading-to-v1.3.md @@ -0,0 +1,57 @@ +--- +title: "Upgrading to v1.3 (latest)" +--- +### Resources + +- [Changelog](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md) +- [CLI Installation guide](/docs/get-started/installation) +- [Cloud upgrade guide](/docs/dbt-versions/upgrade-core-in-cloud) + +## Breaking changes + +We are committed to providing backward compatibility for all versions 1.x. If you encounter an error upon upgrading, please let us know by [opening an issue](https://github.com/dbt-labs/dbt-core/issues/new). + +There are three changes in dbt Core v1.3 that may require action from some users: +1.
If you have a `profiles.yml` file located in the root directory where you run dbt, dbt will start preferring that profiles file over the default location on your machine. [You can read more details here](/docs/get-started/connection-profiles#advanced-customizing-a-profile-directory). +2. If you already have `.py` files defined in the `model-paths` of your dbt project, dbt will start trying to read them as Python models. You can use [the new `.dbtignore` file](dbtignore) to tell dbt to ignore those files. +3. If you have custom code accessing the `raw_sql` property of models (with the [model](dbt-jinja-functions/model) or [graph](/reference/dbt-jinja-functions/graph) objects), it has been renamed to `raw_code`. This is a change to the manifest contract, described in more detail below. + +### For users of dbt Metrics + +The names of metric properties have changed, with backward compatibility. Those changes are: +- Renamed `type` to `calculation_method` +- Renamed `sql` to `expression` +- Renamed `expression` calculation method metrics to `derived` calculation method metrics + +We plan to keep backward compatibility for a full minor version. Defining metrics with the old names will raise an error in dbt Core v1.4. + +### For consumers of dbt artifacts (metadata) + +We have updated the manifest schema version to `v7`. This includes the changes to metrics described above and a few other changes related to the addition of Python models: +- Renamed `raw_sql` to `raw_code` +- Renamed `compiled_sql` to `compiled_code` +- A new top-level node property, `language` (`'sql'` or `'python'`) + +For users of [state-based selection](understanding-state): This release includes logic providing backward and forward compatibility for older manifest versions. While running dbt Core v1.3, it should be possible to use `state:modified --state ...` selection against a manifest produced by dbt Core v1.0 and higher. + +### For maintainers of adapter plugins + +_GitHub discussion forthcoming_ + +**Notes:** +- The `statement` and `create_table_as` macros accept a new argument, `language`, with a default value of `'sql'` + +## New and changed documentation + +- **[Python models](building-models/python-models)** are natively supported in `dbt-core` for the first time, on data warehouses that support Python runtimes. +- Updates made to **[Metrics](build/metrics)** reflect their new syntax for definition, as well as additional properties that are now available. +- Plus, a few related updates to **[exposure properties](exposure-properties)**: `config`, `label`, and `name` validation. + +- **[Custom `node_color`](/reference/resource-configs/docs.md)** in `dbt-docs`. For the first time, you can control the colors displayed in dbt's DAG. Want bronze, silver, and gold layers? It's at your fingertips. +- **[`Profiles.yml`](/docs/get-started/connection-profiles#advanced-customizing-a-profile-directory)** search order now looks in the current working directory before `~/.dbt`. + + +### Quick hits +- **["Full refresh"](full_refresh)** flag supports a short name, `-f`. +- **[The "config" selection method](methods#the-config-method)** supports boolean and list config values, in addition to strings. +- Two new dbt-Jinja context variables for accessing invocation metadata: [`invocation_args_dict`](flags#invocation_args_dict) and [`dbt_metadata_envs`](env_var#custom-metadata). 
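+
+As a small illustration of those invocation metadata variables (a sketch; the macro name is ours):
+
+```sql
+{% macro show_invocation_args() %}
+    {# prints the flags and arguments this dbt invocation was started with #}
+    {{ print(invocation_args_dict) }}
+{% endmacro %}
+```
+
+Running `dbt run-operation show_invocation_args` will echo the invocation metadata to the terminal.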
diff --git a/website/docs/guides/migration/versions/06-upgrading-to-v1.2.md b/website/docs/guides/migration/versions/06-upgrading-to-v1.2.md new file mode 100644 index 00000000000..51f1f8d0a01 --- /dev/null +++ b/website/docs/guides/migration/versions/06-upgrading-to-v1.2.md @@ -0,0 +1,36 @@ +--- +title: "Upgrading to v1.2" +--- +### Resources + +- [Changelog](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md) +- [CLI Installation guide](/docs/get-started/installation) +- [Cloud upgrade guide](/docs/dbt-versions/upgrade-core-in-cloud) + +## Breaking changes + +There are no breaking changes for code in dbt projects and packages. We are committed to providing backwards compatibility for all versions 1.x. If you encounter an error upon upgrading, please let us know by [opening an issue](https://github.com/dbt-labs/dbt-core/issues/new). + +### For consumers of dbt artifacts (metadata) + +The manifest schema version has been updated to `v6`. The relevant changes are: +- Change to `config` default, which includes a new `grants` property with default value `{}` +- Addition of a `metrics` property, to any node which could reference metrics using the `metric()` function + +For users of [state-based selection](understanding-state): This release also includes new logic declaring forwards compatibility for older manifest versions. While running dbt Core v1.2, it should be possible to use `state:modified --state ...` selection against a manifest produced by dbt Core v1.0 or v1.1. + +### For maintainers of adapter plugins + +See GitHub discussion [dbt-labs/dbt-core#5468](https://github.com/dbt-labs/dbt-core/discussions/5468) for detailed information. + +## New and changed functionality + +- **[Grants](resource-configs/grants)** are natively supported in `dbt-core` for the first time. That support extends to all standard materializations, and the most popular adapters. If you already use hooks to apply simple grants, we encourage you to use built-in `grants` to configure your models, seeds, and snapshots instead. This will enable you to [DRY](https://en.wikipedia.org/wiki/Don%27t_repeat_yourself) up your duplicated or boilerplate code. +- **[Metrics](/docs/build/metrics)** now support an `expression` type (metrics-on-metrics), as well as a `metric()` function to use when referencing metrics from within models, macros, or `expression`-type metrics. For more information on how to use expression metrics, check out the [**`dbt_metrics` package**](https://github.com/dbt-labs/dbt_metrics). +- **[dbt-Jinja functions](/reference/dbt-jinja-functions)** now include the [`itertools` Python module](dbt-jinja-functions/modules#itertools), as well as the [set](dbt-jinja-functions/set) and [zip](dbt-jinja-functions/zip) functions. +- **[Node selection](node-selection/syntax)** includes a [file selection method](node-selection/methods#the-file-method) (`-s model.sql`), and [yaml selector](node-selection/yaml-selectors) inheritance. +- **[Global configs](global-configs)** now include CLI flag and environment variable settings for [`target-path`](target-path) and [`log-path`](log-path), which can be used to override the values set in `dbt_project.yml`. + +### Specific adapters + +- [Postgres](/reference/warehouse-setups/postgres-setup) and [Redshift](/reference/warehouse-setups/redshift-setup) profiles support a `retries` config, if dbt encounters an operational error or timeout when opening a connection. The default is 1 retry.
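+
+As a quick illustration of the new `set` and `zip` Jinja functions mentioned above (a sketch; the macro name and values are ours):
+
+```sql
+{% macro show_column_pairs() %}
+    {% set names = ['id', 'status'] %}
+    {% set types = ['int', 'text'] %}
+    {# zip() pairs the two lists; set() deduplicates an iterable #}
+    {{ log(zip(names, types) | list, info=True) }}
+    {{ log(set([1, 1, 2]) | list, info=True) }}
+{% endmacro %}
+```
+
+Running `dbt run-operation show_column_pairs` prints both results to the terminal.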
diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-v1.1.md b/website/docs/guides/migration/versions/07-upgrading-to-v1.1.md similarity index 57% rename from website/docs/docs/guides/migration-guide/upgrading-to-v1.1.md rename to website/docs/guides/migration/versions/07-upgrading-to-v1.1.md index 82d1d024faf..382fb2e5d7a 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-v1.1.md +++ b/website/docs/guides/migration/versions/07-upgrading-to-v1.1.md @@ -1,21 +1,19 @@ --- -title: "Upgrading to v1.1 (latest)" - +title: "Upgrading to v1.1" --- - ### Resources - [Changelog](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md) -- [CLI Installation guide](/dbt-cli/install/overview) -- [Cloud upgrade guide](/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version) +- [CLI Installation guide](/docs/get-started/installation) +- [Cloud upgrade guide](/docs/dbt-versions/upgrade-core-in-cloud) ## Breaking changes -There are no breaking changes for end users of dbt. We are committed to providing backwards compatibility for all versions 1.x. If you encounter an error upon upgrading, please let us know by [opening an issue](https://github.com/dbt-labs/dbt-core/issues/new). +There are no breaking changes for code in dbt projects and packages. We are committed to providing backwards compatibility for all versions 1.x. If you encounter an error upon upgrading, please let us know by [opening an issue](https://github.com/dbt-labs/dbt-core/issues/new). ### For maintainers of adapter plugins -We have reworked the testing suite for adapter plugin functionality. For details on the new testing suite, see: ["Testing a new adapter"](testing-a-new-adapter) +We have reworked the testing suite for adapter plugin functionality. For details on the new testing suite, see: [Testing a new adapter](/guides/advanced/adapter-development/4-testing-a-new-adapter). The abstract methods `get_response` and `execute` now only return `connection.AdapterResponse` in type hints. Previously, they could return a string. We encourage you to update your methods to return an object of class `AdapterResponse`, or implement a subclass specific to your adapter. This also gives you the opportunity to add fields specific to your adapter's query execution, such as `rows_affected` or `bytes_processed`. @@ -23,9 +21,19 @@ The abstract methods `get_response` and `execute` now only return `connection.Ad The manifest schema version will be updated to v5. The only change is to the default value of `config` for parsed nodes. +For users of [state-based functionality](understanding-state), such as the `state:modified` selector, recall that: + +> The `--state` artifacts must be of schema versions that are compatible with the currently running dbt version. + +If you have two jobs, whereby one job compares or defers to artifacts produced by the other, you'll need to upgrade both at the same time. If there's a mismatch, dbt will alert you with this error message: + +``` +Expected a schema version of "https://schemas.getdbt.com/dbt/manifest/v5.json" in /manifest.json, but found "https://schemas.getdbt.com/dbt/manifest/v4.json". Are you running with a different version of dbt? +``` + ## New and changed documentation -[**Incremental models**](configuring-incremental-models) can now accept a list of multiple columns as their `unique_key`, for models that need a combination of columns to uniquely identify each row.
This is supported by the most common data warehouses, for incremental strategies that make use of the `unique_key` config (`merge` and `delete+insert`). +[**Incremental models**](/docs/build/incremental-models) can now accept a list of multiple columns as their `unique_key`, for models that need a combination of columns to uniquely identify each row. This is supported by the most common data warehouses, for incremental strategies that make use of the `unique_key` config (`merge` and `delete+insert`). [**Generic tests**](resource-properties/tests) can define custom names. This is useful to "prettify" the synthetic name that dbt applies automatically. It's needed to disambiguate the case when the same generic test is defined multiple times with different configurations. @@ -33,22 +41,23 @@ The manifest schema version will be updated to v5. The only change is to the def ### Advanced and experimental functionality -**Fresh Rebuilds.** There's a new _experimental_ selection method in town: [`source_status:fresher`](node-selection/methods#the-source_status-method). Much like the `state:` and `result` methods, the goal is to use dbt metadata to run your DAG more efficiently. If dbt has access to previous and current results of `dbt source freshness` (the `sources.json` artifact), dbt can compare them to determine which sources have loaded new data, and select only resources downstream of "fresher" sources. Read more in ["Understanding State"](understanding-state) and ["CI/CD in dbt Cloud"](cloud-enabling-continuous-integration-with-github). +**Fresh Rebuilds.** There's a new _experimental_ selection method in town: [`source_status:fresher`](node-selection/methods#the-source_status-method). Much like the `state:` and `result` methods, the goal is to use dbt metadata to run your DAG more efficiently. If dbt has access to previous and current results of `dbt source freshness` (the `sources.json` artifact), dbt can compare them to determine which sources have loaded new data, and select only resources downstream of "fresher" sources. Read more in [Understanding State](understanding-state) and [CI/CD in dbt Cloud](/docs/deploy/cloud-ci-job). [**dbt-Jinja functions**](/reference/dbt-jinja-functions) have a new landing page, and two new members: -- [`print`](dbt-jinja-functions/print) exposes the Python `print()` function. It can be used as an alternative to `log()`, and together with the `QUIET` config, for advanced macro-driven workflows. -- [`selected_resources`](dbt-jinja-functions/selected_resources) exposes, at runtime, the list of DAG nodes selected by the current task. +- [`print`](/reference/dbt-jinja-functions/print) exposes the Python `print()` function. It can be used as an alternative to `log()`, and together with the `QUIET` config, for advanced macro-driven workflows. +- [`selected_resources`](/reference/dbt-jinja-functions/selected_resources) exposes, at runtime, the list of DAG nodes selected by the current task. [**Global configs**](global-configs) include some new additions: + - `QUIET` and `NO_PRINT`, to control which log messages dbt prints to terminal output. For use in advanced macro-driven workflows, such as [codegen](https://hub.getdbt.com/dbt-labs/codegen/latest/). - `CACHE_SELECTED_ONLY` is an _experimental_ config that can significantly speed up dbt's start-of-run preparations, in cases where you're running only a few models from a large project that manages many schemas. 
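+
+To illustrate the multi-column `unique_key` support described above, a minimal sketch (model and column names are hypothetical):
+
+```sql
+{{
+    config(
+        materialized='incremental',
+        incremental_strategy='merge',
+        unique_key=['order_id', 'order_date']
+    )
+}}
+
+select order_id, order_date, status
+from {{ ref('stg_orders') }}
+```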
### For users of specific adapters -**dbt-bigquery** added Support for finer-grained configuration of query timeout and retry when defining your [connection profile](bigquery-profile). +**dbt-bigquery** added support for finer-grained configuration of query timeout and retry when defining your [connection profile](/reference/warehouse-setups/bigquery-setup). -**dbt-spark** added support for a [`session` connection method](spark-profile#session), for use with a pySpark session, to support rapid iteration when developing advanced or experimental functionality. This connection method is not recommended for new users, and it is not supported in dbt Cloud. +**dbt-spark** added support for a [`session` connection method](/reference/warehouse-setups/spark-setup#session), for use with a pySpark session, to support rapid iteration when developing advanced or experimental functionality. This connection method is not recommended for new users, and it is not supported in dbt Cloud. ### Dependencies diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-v1.0.md b/website/docs/guides/migration/versions/08-upgrading-to-v1.0.md similarity index 92% rename from website/docs/docs/guides/migration-guide/upgrading-to-v1.0.md rename to website/docs/guides/migration/versions/08-upgrading-to-v1.0.md index 8d583fe7baf..de8a5690521 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-v1.0.md +++ b/website/docs/guides/migration/versions/08-upgrading-to-v1.0.md @@ -1,14 +1,12 @@ --- title: "Upgrading to v1.0" - --- - ### Resources - [Discourse](https://discourse.getdbt.com/t/3180) - [Changelog](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md) -- [CLI Installation guide](/dbt-cli/install/overview) -- [Cloud upgrade guide](/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version) +- [CLI Installation guide](/docs/get-started/installation) +- [Cloud upgrade guide](/docs/dbt-versions/upgrade-core-in-cloud) ## Breaking changes @@ -28,7 +26,7 @@ dbt Core major version 1.0 includes a number of breaking changes! Wherever possi ### Tests -The two **test types** are now "singular" and "generic" (instead of "data" and "schema", respectively). The `test_type:` selection method accepts `test_type:singular` and `test_type:generic`. (It will also accept `test_type:schema` and `test_type:data` for backwards compatibility.) **Not backwards compatible:** The `--data` and `--schema` flags to dbt test are no longer supported, and tests no longer have the tags `'data'` and `'schema'` automatically applied. Updated docs: [tests](building-a-dbt-project/tests), [test selection](test-selection-examples), [selection methods](node-selection/methods). +The two **test types** are now "singular" and "generic" (instead of "data" and "schema", respectively). The `test_type:` selection method accepts `test_type:singular` and `test_type:generic`. (It will also accept `test_type:schema` and `test_type:data` for backwards compatibility.) **Not backwards compatible:** The `--data` and `--schema` flags to dbt test are no longer supported, and tests no longer have the tags `'data'` and `'schema'` automatically applied. Updated docs: [tests](/docs/build/tests), [test selection](test-selection-examples), [selection methods](node-selection/methods). The `greedy` flag/property has been renamed to **`indirect_selection`**, which is now eager by default. **Note:** This reverts test selection to its pre-v0.20 behavior by default. 
`dbt test -s my_model` _will_ select multi-parent tests, such as `relationships`, that depend on unselected resources. To achieve the behavior change in v0.20 + v0.21, set `--indirect-selection=cautious` on the CLI or `indirect_selection: cautious` in yaml selectors. Updated docs: [test selection examples](test-selection-examples), [yaml selectors](yaml-selectors) @@ -38,7 +36,7 @@ Global project macros have been reorganized, and some old unused macros have bee ### Installation -- [Installation docs](install/overview) reflects adapter-specific installations +- [Installation docs](/docs/supported-data-platforms) reflects adapter-specific installations - `pip install dbt` is no longer supported, and will raise an explicit error. Install the specific adapter plugin you need as `pip install dbt-`. - `brew install dbt` is no longer supported. Install the specific adapter plugin you need (among Postgres, Redshift, Snowflake, or BigQuery) as `brew install dbt-`. - Removed official support for python 3.6, which is reaching end of life on December 23, 2021 @@ -69,5 +67,5 @@ Several under-the-hood changes from past minor versions, tagged with deprecation - [Parsing](parsing): partial parsing and static parsing have been turned on by default. - [Global configs](global-configs) have been standardized. Related updates to [global CLI flags](global-cli-flags) and [`profiles.yml`](profiles.yml). - [The `init` command](init) has a whole new look and feel. It's no longer just for first-time users. -- Add `result:` subselectors for smarter reruns when dbt models have errors and tests fail. See examples: [Pro-tips for Workflows](/docs/guides/best-practices.md#pro-tips-for-workflows) +- Add `result:` subselectors for smarter reruns when dbt models have errors and tests fail. See examples: [Pro-tips for Workflows](/guides/legacy/best-practices#pro-tips-for-workflows) - Secret-prefixed [env vars](env_var) are now allowed only in `profiles.yml` + `packages.yml` diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-v0.21.md b/website/docs/guides/migration/versions/09-upgrading-to-v0.21.md similarity index 85% rename from website/docs/docs/guides/migration-guide/upgrading-to-v0.21.md rename to website/docs/guides/migration/versions/09-upgrading-to-v0.21.md index a575303712a..b25abad32f2 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-v0.21.md +++ b/website/docs/guides/migration/versions/09-upgrading-to-v0.21.md @@ -11,7 +11,7 @@ dbt Core v0.21 has reached the end of critical support. No new patch versions wi - [Discourse](https://discourse.getdbt.com/t/3077) - [Release notes](https://github.com/dbt-labs/dbt-core/releases/tag/v0.21.0) -- [Full changelog](https://github.com/fishtown-analytics/dbt/blob/0.21.latest/CHANGELOG.md) +- [Full changelog](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md) ## Breaking changes @@ -39,12 +39,12 @@ dbt Core v0.21 has reached the end of critical support. No new patch versions wi ### Elsewhere in Core - [Resource configs and properties](configs-and-properties) docs have been consolidated and reconciled. New `config` property that makes it possible to configure models, seeds, snapshots, and tests in all yaml files. -- [Configuring incremental models](configuring-incremental-models): New optional configuration for incremental models, `on_schema_change`. +- [Configuring incremental models](/docs/build/incremental-models): New optional configuration for incremental models, `on_schema_change`. 
- [Environment variables](env_var): Add a log-scrubbing prefix, `DBT_ENV_SECRET_` - [Test `where` config](where) has been reimplemented as a macro (`get_where_subquery`) that you can optionally reimplement, too - [`dispatch`](dispatch) now supports reimplementing global macros residing in the `dbt` macro namespace with versions from installed packages, by leveraging `search_order` in the [`dispatch` project config](project-configs/dispatch-config) ### Plugins -- **Postgres** [profile](postgres-profile) property `connect_timeout` now configurable. Also applicable to child plugins (e.g. `dbt-redshift`) -- **Redshift**: [profile](redshift-profile) property `ra3_node: true` to support cross-database source definitions and read-only querying -- **BigQuery**: [profile](bigquery-profile) property `execution_project` now configurable. [Snapshots](snapshots) support `target_project` and `target_dataset` config aliases. +- **Postgres** [profile](/reference/warehouse-setups/postgres-setup) property `connect_timeout` now configurable. Also applicable to child plugins (e.g. `dbt-redshift`) +- **Redshift**: [profile](/reference/warehouse-setups/redshift-setup) property `ra3_node: true` to support cross-database source definitions and read-only querying +- **BigQuery**: [profile](/reference/warehouse-setups/bigquery-setup) property `execution_project` now configurable. [Snapshots](snapshots) support `target_project` and `target_dataset` config aliases. diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-v0.20.md b/website/docs/guides/migration/versions/10-upgrading-to-v0.20.md similarity index 87% rename from website/docs/docs/guides/migration-guide/upgrading-to-v0.20.md rename to website/docs/guides/migration/versions/10-upgrading-to-v0.20.md index aa61bcc66d4..7065b567bd8 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-v0.20.md +++ b/website/docs/guides/migration/versions/10-upgrading-to-v0.20.md @@ -11,7 +11,7 @@ dbt Core v0.20 has reached the end of critical support. No new patch versions wi - [Discourse](https://discourse.getdbt.com/t/2621) - [Release notes](https://github.com/dbt-labs/dbt-core/releases/tag/v0.20.0) -- [Full changelog](https://github.com/fishtown-analytics/dbt/blob/0.20.latest/CHANGELOG.md) +- [Full changelog](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md) ## Breaking changes @@ -23,7 +23,8 @@ dbt Core v0.20 has reached the end of critical support. No new patch versions wi ## New and changed documentation ### Tests -- [Building a dbt Project: tests](building-a-dbt-project/tests) + +- [Building a dbt Project: tests](/docs/build/tests) - [Test Configs](test-configs) - [Test properties](resource-properties/tests) - [Node Selection](node-selection/syntax) (with updated [test selection examples](test-selection-examples)) @@ -32,12 +33,12 @@ dbt Core v0.20 has reached the end of critical support. No new patch versions wi ### Elsewhere in Core - [Parsing](parsing): rework of partial parsing, introduction of experimental parser - The [graph](graph) Jinja context variable includes `exposures` -- [Packages](package-management) can now be installed from git with a specific commit hash as the revision, or via sparse checkout if the dbt project is located in a `subdirectory`. +- [Packages](/docs/build/packages) can now be installed from git with a specific commit hash as the revision, or via sparse checkout if the dbt project is located in a `subdirectory`. 
- [adapter.dispatch](dispatch) supports new arguments, a new [project-level config](project-configs/dispatch-config), and includes parent adapters when searching for macro implementations. - [Exposures](exposure-properties) support `tags` and `meta` properties ### Plugins - New partition-related [BigQuery configs](bigquery-configs#additional-partition-configs): `require_partition_filter` and `partition_expiration_days` - On BigQuery, dbt can now add [query comment](query-comment) items as job labels -- Snowflake and BigQuery [incremental models](configuring-incremental-models#strategy-specific-configs) using the `merge` strategy accept a new optional config, `merge_update_columns`. +- Snowflake and BigQuery [incremental models](/docs/build/incremental-models#strategy-specific-configs) using the `merge` strategy accept a new optional config, `merge_update_columns`. - [Postgres configs](postgres-configs) now include first-class support for `indexes` diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-0-11-0.md b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-11-0.md similarity index 100% rename from website/docs/docs/guides/migration-guide/upgrading-to-0-11-0.md rename to website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-11-0.md diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-0-12-0.md b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-12-0.md similarity index 74% rename from website/docs/docs/guides/migration-guide/upgrading-to-0-12-0.md rename to website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-12-0.md index ca97c0a4242..60900d3c1a4 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-0-12-0.md +++ b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-12-0.md @@ -6,4 +6,4 @@ id: "upgrading-to-0-12-0" ## End of support Support for the `repositories:` block in `dbt_project.yml` (deprecated in 0.10.0) was removed. -In order to install packages in your dbt project, you must use [a `packages.yml` file](package-management#how-do-i-add-a-package-to-my-project). +In order to install packages in your dbt project, you must use [a `packages.yml` file](/docs/build/packages#how-do-i-add-a-package-to-my-project). diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-0-13-0.md b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-13-0.md similarity index 93% rename from website/docs/docs/guides/migration-guide/upgrading-to-0-13-0.md rename to website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-13-0.md index 0e6c8cc9bfc..7834c330b5c 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-0-13-0.md +++ b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-13-0.md @@ -25,4 +25,4 @@ A number of materialization-specific adapter methods have changed in breaking wa Version 1 schema.yml specs (deprecated in 0.11.0) are no longer supported. Please use the version 2 spec instead. -See the [0.11.0 migration guide](upgrading-to-0-11-0#schemayml-v2-syntax) for details. +See the [0.11.0 migration guide](upgrading-to-0-11-0.md#schemayml-v2-syntax) for details. 
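To ground the `packages.yml` references above, here is a minimal sketch showing both a hub package and a git package pinned to a specific commit inside a `subdirectory` (the versions, repo URL, commit hash, and paths are illustrative):

```yaml
packages:
  # installed from the dbt package hub
  - package: dbt-labs/dbt_utils
    version: 0.8.6

  # installed from git, pinned to a specific commit hash,
  # where the dbt project lives in a subdirectory of the repo
  - git: "https://github.com/your-org/your-dbt-packages.git"
    revision: 4e28d6da126e98d44d4df4dbc5490f74edbe26bd
    subdirectory: "packages/my_package"
```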
diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-0-14-0.md b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-14-0.md similarity index 99% rename from website/docs/docs/guides/migration-guide/upgrading-to-0-14-0.md rename to website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-14-0.md index 8a1a5faa458..c22bd1490bb 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-0-14-0.md +++ b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-14-0.md @@ -118,7 +118,7 @@ alter table archived.orders_archived rename "scd_id" to dbt_scd_id; ## Upgrading the generate_schema_name signature -In dbt v0.14.0, the `generate_schema_name` macro signature was changed to accept a second argument, `node`. For more information on the new `node` argument, consulting the documentation for [using custom schemas](using-custom-schemas). +In dbt v0.14.0, the `generate_schema_name` macro signature was changed to accept a second argument, `node`. For more information on the new `node` argument, consult the documentation for [using custom schemas](/docs/build/custom-schemas). Existing one-argument implementations of `generate_schema_name` macros are still supported, but support for this form of the macro will be dropped in a future release. If you currently have a one-argument version of this macro, you will see a warning when you run your dbt project. diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-0-14-1.md b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-14-1.md similarity index 100% rename from website/docs/docs/guides/migration-guide/upgrading-to-0-14-1.md rename to website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-14-1.md diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-0-15-0.md b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-15-0.md similarity index 100% rename from website/docs/docs/guides/migration-guide/upgrading-to-0-15-0.md rename to website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-15-0.md diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-0-16-0.md b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-16-0.md similarity index 94% rename from website/docs/docs/guides/migration-guide/upgrading-to-0-16-0.md rename to website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-16-0.md index 9590eca0379..8936f0ff3fc 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-0-16-0.md +++ b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-16-0.md @@ -28,7 +28,7 @@ information on the change. ### One-argument generate_schema_name deprecation Support for the one-argument variant of `generate_schema_name` macros (deprecated in a previous release) are no longer supported. If you are using the one-argument -variant of `generate_schema_name`, see [the docs on custom schemas](using-custom-schemas) +variant of `generate_schema_name`, see [the docs on custom schemas](/docs/build/custom-schemas) for an example of how to use the two-argument variant of `generate_schema_name`. 
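For reference, here is a sketch of the two-argument form, modeled on dbt's default implementation (an override like this lives in your project's `macros/` directory):

```sql
{% macro generate_schema_name(custom_schema_name, node) -%}
    {%- set default_schema = target.schema -%}
    {%- if custom_schema_name is none -%}
        {{ default_schema }}
    {%- else -%}
        {{ default_schema }}_{{ custom_schema_name | trim }}
    {%- endif -%}
{%- endmacro %}
```

The `node` argument gives the macro access to the model, seed, or snapshot being processed, so you can branch on attributes like `node.resource_type` if needed.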
### BigQuery partition_by syntax @@ -104,12 +104,12 @@ please be mindful of the following changes to dbt's Python dependencies: ## New and changed documentation - [BigQuery partitioning configs](bigquery-configs) - [Select specific seeds to run with `--select`](seed) -- [New `generate_database_name` macro](using-custom-databases) +- [New `generate_database_name` macro](/docs/build/custom-databases#generate_database_name) - [New `dbt_project.yml context`](dbt-project-yml-context) - [New configurations for schema.yml files](configs-and-properties) -- [New configurations for Source declarations](using-sources) -- [New Postgres connection configs](postgres-profile) -- [New Snowflake KeyPair auth configs](snowflake-profile) +- [New configurations for Source declarations](/docs/build/sources) +- [New Postgres connection configs](/reference/warehouse-setups/postgres-setup) +- [New Snowflake KeyPair auth configs](/reference/warehouse-setups/snowflake-setup) - [New `builtins` jinja context variable](builtins) - [New `fromyaml` context method](fromyaml) - [New `toyaml` context method](toyaml) diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-0-17-0.md b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-17-0.md similarity index 92% rename from website/docs/docs/guides/migration-guide/upgrading-to-0-17-0.md rename to website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-17-0.md index fedf3362bdc..7d7f70baa14 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-0-17-0.md +++ b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-17-0.md @@ -196,7 +196,7 @@ models: ### Accessing sources in the `graph` object -In previous versions of dbt, the `sources` in a dbt project could be accessed in the compilation context using the [graph.nodes](dbt-jinja-functions/graph) context variable. In dbt v0.17.0, these sources have been moved out of the `graph.nodes` dictionary and into a new `graph.sources` dictionary. This change is also reflected in the `manifest.json` artifact produced by dbt. If you are accessing these sources programmatically, please update any references from `graph.nodes` to `graph.sources` instead. +In previous versions of dbt, the `sources` in a dbt project could be accessed in the compilation context using the [graph.nodes](/reference/dbt-jinja-functions/graph) context variable. In dbt v0.17.0, these sources have been moved out of the `graph.nodes` dictionary and into a new `graph.sources` dictionary. This change is also reflected in the `manifest.json` artifact produced by dbt. If you are accessing these sources programmatically, please update any references from `graph.nodes` to `graph.sources` instead. 
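As a sketch, a macro that previously looped over `graph.nodes` to find sources would now iterate over `graph.sources` instead (the logging here is purely illustrative):

```sql
{% if execute %}
  {% for source in graph.sources.values() %}
    {{ log("Found source: " ~ source.source_name ~ "." ~ source.name, info=true) }}
  {% endfor %}
{% endif %}
```

The `execute` guard matters because the `graph` object is only fully populated at execution time, not during parsing.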
### BigQuery `locations` removed from Catalog @@ -248,11 +248,11 @@ BigQuery: **Core** - [`path:` selectors](node-selection/methods#the-path-method) - [`--fail-fast`](commands/run#failing-fast) -- [as_text Jinja filter](dbt-jinja-functions/as_text) -- [accessing nodes in the `graph` object](dbt-jinja-functions/graph) +- [as_text Jinja filter](/reference/dbt-jinja-functions/as_text) +- [accessing nodes in the `graph` object](/reference/dbt-jinja-functions/graph) - [persist_docs](resource-configs/persist_docs) - [source properties](reference/source-properties) - [source overrides](resource-properties/overrides) **BigQuery** -- [maximum_bytes_billed](bigquery-profile#maximum-bytes-billed) +- [maximum_bytes_billed](/reference/warehouse-setups/bigquery-setup#maximum-bytes-billed) diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-0-18-0.md b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-18-0.md similarity index 90% rename from website/docs/docs/guides/migration-guide/upgrading-to-0-18-0.md rename to website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-18-0.md index efdc2162a7e..de0e6d6cb8b 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-0-18-0.md +++ b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-18-0.md @@ -71,13 +71,13 @@ can override schema test definitions - [project-level overviews](documentation#custom-project-level-overviews) **Redshift** -- [`iam_profile`](redshift-profile#specifying-an-iam-profile) +- [`iam_profile`](/reference/warehouse-setups/redshift-setup#specifying-an-iam-profile) **Snowflake** -- `query_tag` in [profile](snowflake-profile), [model config](snowflake-configs#query-tags) +- `query_tag` in [profile](/reference/warehouse-setups/snowflake-setup), [model config](snowflake-configs#query-tags) - automatic SSO [session caching](snowflake-configs#sso-authentication) support **BigQuery** -- [`impersonate_service_account`](bigquery-profile#service-account-impersonation) +- [`impersonate_service_account`](/reference/warehouse-setups/bigquery-setup#service-account-impersonation) - [`policy_tags`](bigquery-configs#policy-tags) - [`hours_to_expiration`](bigquery-configs#controlling-table-expiration) diff --git a/website/docs/docs/guides/migration-guide/upgrading-to-0-19-0.md b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-19-0.md similarity index 86% rename from website/docs/docs/guides/migration-guide/upgrading-to-0-19-0.md rename to website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-19-0.md index 0473e309d86..c5ed7feb6ce 100644 --- a/website/docs/docs/guides/migration-guide/upgrading-to-0-19-0.md +++ b/website/docs/guides/migration/versions/11-Older versions/upgrading-to-0-19-0.md @@ -7,7 +7,7 @@ title: "Upgrading to 0.19.0" - [Discourse](https://discourse.getdbt.com/t/1951) - [Release notes](https://github.com/dbt-labs/dbt-core/releases/tag/v0.19.0) -- [Full changelog](https://github.com/fishtown-analytics/dbt/blob/0.19.latest/CHANGELOG.md) +- [Full changelog](https://github.com/dbt-labs/dbt-core/blob/0.19.latest/CHANGELOG.md) ## Breaking changes @@ -23,7 +23,7 @@ See the docs below for more details. We don't expect these to require action in #### Deprecations -Removed support for `config-version: 1` of dbt_project.yml, which was deprecated in v0.17.0. Use `config-version: 2` in all projects and installed packages. Otherwise, dbt will raise an error. 
See docs on [config-version](config-version) and the [v0.17.0 Migration Guide](upgrading-to-0-17-0) for details. +Removed support for `config-version: 1` of dbt_project.yml, which was deprecated in v0.17.0. Use `config-version: 2` in all projects and installed packages. Otherwise, dbt will raise an error. See docs on [config-version](config-version) and the [v0.17.0 Migration Guide](/guides/migration/versions) for details. ### For dbt plugin maintainers @@ -42,7 +42,7 @@ See [dbt#2961](https://github.com/dbt-labs/dbt-core/pull/2961) for full implemen ## New and changed documentation ### Core -- [dbt Artifacts](dbt-artifacts): The JSON artifacts produced by dbt—manifest, catalog, run results, and sources—are simpler to consume and more clearly documented. +- [dbt Artifacts](dbt-artifacts): The artifacts produced by dbt—manifest, catalog, run results, and sources—are simpler to consume and more clearly documented. - [dbt Classes](dbt-classes#result-objects), [on-run-end Context](on-run-end-context#results): The `Result` object has a new schema, in line with changes to `run_results.json`. - [Statement blocks](statement-blocks): The `call statement` result `status` string is now a structured object named `response`. - [Snapshots](snapshots#invalidate_hard_deletes): If the config `invalidate_hard_deletes` is enabled, `dbt snapshot` will update records whose unique key no longer exist in the snapshot query. Should those uniquely identified records "revive," `dbt snapshot` will re-add them. @@ -56,8 +56,8 @@ See [dbt#2961](https://github.com/dbt-labs/dbt-core/pull/2961) for full implemen - [RPC](rpc): Added `state` and `defer` as arguments to RPC methods for which it is supported on the CLI. ### BigQuery -- [BigQuery profile](bigquery-profile): dbt can connect via OAuth tokens (one-time or refresh), and it can use the default project when connecting via `gcloud` oauth. +- [BigQuery profile](/reference/warehouse-setups/bigquery-setup): dbt can connect via OAuth tokens (one-time or refresh), and it can use the default project when connecting via `gcloud` oauth. - [Hourly, monthly and yearly partitions](bigquery-configs#partitioning-by-a-date-or-timestamp): With a new `granularity` attribute of the `partition_by` config, dbt can materialize models as tables partitioned by hour, month, or year. ### Spark -- [Spark profile](spark-profile): The `thrift` and `http` connection methods require installation of a `PyHive` extra. +- [Spark profile](/reference/warehouse-setups/spark-setup): The `thrift` and `http` connection methods require installation of a `PyHive` extra. diff --git a/website/docs/guides/orchestration/airflow-and-dbt-cloud/1-airflow-and-dbt-cloud.md b/website/docs/guides/orchestration/airflow-and-dbt-cloud/1-airflow-and-dbt-cloud.md new file mode 100644 index 00000000000..86a5c669be6 --- /dev/null +++ b/website/docs/guides/orchestration/airflow-and-dbt-cloud/1-airflow-and-dbt-cloud.md @@ -0,0 +1,55 @@ +--- +title: Airflow and dbt Cloud +id: 1-airflow-and-dbt-cloud +--- + +In some cases, [Airflow](https://airflow.apache.org/) may be the preferred orchestrator for your organization over working fully within dbt Cloud. 
There are a few reasons your team might be considering using Airflow to orchestrate your dbt jobs: + +- Your team is already using Airflow to orchestrate other processes +- Your team needs to ensure that a [dbt job](https://docs.getdbt.com/docs/dbt-cloud/cloud-overview#schedule-and-run-dbt-jobs-in-production) kicks off before or after another process outside of dbt Cloud +- Your team needs flexibility to manage more complex scheduling, such as kicking off one dbt job only after another has completed +- Your team wants to own their own orchestration solution +- You need code to work right now without starting from scratch + +## How are people using Airflow + dbt today? + +### Airflow + dbt Core + +GitLab shares many great examples through their open source data engineering work. Example: [here](https://gitlab.com/gitlab-data/analytics/-/blob/master/dags/transformation/dbt_snowplow_backfill.py). This is especially appropriate if you are well-versed in Kubernetes, CI/CD, and Docker task management when building your Airflow pipelines. If this is you and your team, you’re in good hands reading through more details: [here](https://about.gitlab.com/handbook/business-technology/data-team/platform/infrastructure/#airflow) and [here](https://about.gitlab.com/handbook/business-technology/data-team/platform/dbt-guide/) + +### Airflow + dbt Cloud API w/Custom Scripts + +This has served as a bridge until the fabled Astronomer + dbt Labs-built dbt Cloud provider became generally available: [here](https://registry.astronomer.io/providers/dbt-cloud?type=Sensors&utm_campaign=Monthly%20Product%20Updates&utm_medium=email&_hsmi=208603877&utm_content=208603877&utm_source=hs_email) + +There have been many different permutations of this over time: + +- [Custom Python Scripts](https://github.com/sungchun12/airflow-dbt-cloud/blob/main/archive/dbt_cloud_example.py): This is an Airflow DAG based on custom Python API utilities [here](https://github.com/sungchun12/airflow-dbt-cloud/blob/main/archive/dbt_cloud_utils.py) +- [Make API requests directly through the BashOperator based on the docs](https://docs.getdbt.com/dbt-cloud/api-v2#operation/triggerRun): You can make cURL requests to invoke dbt Cloud to do what you want +- [Other ways to run dbt in Airflow](https://docs.getdbt.com/docs/running-a-dbt-project/running-dbt-in-production/#using-airflow): Official dbt Docs on how teams are running dbt in Airflow + +## This guide’s process + +These solutions are great, but they can be difficult to trust as your team grows and the management of testing, job definitions, secrets, and pipelines increases past your team’s capacity. Roles become blurry (or were never clearly defined at the start!). Both data and analytics engineers start digging through custom logging within each other’s workflows to make heads or tails of where and what the issue really is. Not to mention that when the issue is found, it can be even harder to decide on the best path forward for safely implementing fixes. This complex workflow, with its unclear delineation of process management, results in a lot of misunderstandings and wasted time just trying to get the process to work smoothly! + +### A better way + +After today’s walkthrough, you’ll get hands-on experience: + +1. Creating a working local Airflow environment +2. Invoking a dbt Cloud job with Airflow (with proof!) +3. 
Reusing tested and trusted Airflow code for your specific use cases + +While you’re learning the ropes, you’ll also gain a better understanding of how this helps to: + +- Reduce the cognitive load when building and maintaining pipelines +- Avoid dependency hell (think: `pip install` conflicts) +- Implement better recoveries from failures +- Define clearer workflows so that data and analytics engineers work better, together ♥️ + +### Prerequisites + +- [dbt Cloud Teams or Enterprise account](https://www.getdbt.com/pricing/) (with [admin access](https://docs.getdbt.com/docs/collaborate/manage-access/enterprise-permissions)) in order to create a service token. Permissions for service tokens can be found [here](https://docs.getdbt.com/docs/dbt-cloud-apis/service-tokens#permissions-for-service-account-tokens). +- A [free Docker account](https://hub.docker.com/signup) in order to sign in to Docker Desktop, which will be installed in the initial setup. +- A local digital scratchpad for temporarily copy-pasting API keys and URLs + +🙌 Let’s get started! 🙌 diff --git a/website/docs/guides/orchestration/airflow-and-dbt-cloud/2-setting-up-airflow-and-dbt-cloud.md b/website/docs/guides/orchestration/airflow-and-dbt-cloud/2-setting-up-airflow-and-dbt-cloud.md new file mode 100644 index 00000000000..ab847c526a0 --- /dev/null +++ b/website/docs/guides/orchestration/airflow-and-dbt-cloud/2-setting-up-airflow-and-dbt-cloud.md @@ -0,0 +1,90 @@ +--- +title: Setting up Airflow and dbt Cloud +id: 2-setting-up-airflow-and-dbt-cloud +--- + +## 1. Install the Astro CLI + +Astro is a managed software service that includes key features for teams working with Airflow. In order to use Astro, we’ll install the Astro CLI, which will give us access to useful commands for working with Airflow locally. You can read more about Astro [here](https://docs.astronomer.io/astro/). + +In this example, we’re using Homebrew to install Astro CLI. Follow the instructions to install the Astro CLI for your own operating system [here](https://docs.astronomer.io/astro/install-cli). + +```bash +brew install astronomer/cloud/astrocloud +``` + + + +## 2. Install and start Docker Desktop + +Docker allows us to spin up an environment with all the apps and dependencies we need for the example. + +Follow the instructions [here](https://docs.docker.com/desktop/) to install Docker desktop for your own operating system. Once Docker is installed, ensure you have it up and running for the next steps. + + + +## 3. Clone the airflow-dbt-cloud repository + +Open your terminal and clone the [airflow-dbt-cloud repository](https://github.com/sungchun12/airflow-dbt-cloud.git). This contains example Airflow DAGs that you’ll use to orchestrate your dbt Cloud job. Once cloned, navigate into the `airflow-dbt-cloud` project. + +```bash +git clone https://github.com/sungchun12/airflow-dbt-cloud.git +cd airflow-dbt-cloud +``` + + + +## 4. Start the Docker container + +You can initialize an Astronomer project in an empty local directory using a Docker container, and then run your project locally using the `start` command. + +1. Run the following commands to initialize your project and start your local Airflow deployment: + + ```bash + astrocloud dev init + astrocloud dev start + ``` + + When this finishes, you should see a message similar to the following: + + ```bash + Airflow is starting up! This might take a few minutes… + + Project is running! All components are now available. 
+ + Airflow Webserver: http://localhost:8080 + Postgres Database: localhost:5432/postgres + The default Airflow UI credentials are: admin:admin + The default Postgres DB credentials are: postgres:postgres + ``` + +2. Open the Airflow interface. Launch your web browser and navigate to the address for the **Airflow Webserver** from your output in Step 1. + + This will take you to your local instance of Airflow. You’ll need to log in with the **default credentials**: + + - Username: admin + - Password: admin + + ![Airflow login screen](/img/guides/orchestration/airflow-and-dbt-cloud/airflow-login.png) + + + +## 5. Create a dbt Cloud service token + +Create a service token from within dbt Cloud using the instructions [found here](https://docs.getdbt.com/docs/dbt-cloud-apis/service-tokens). Ensure that you save a copy of the token, as you won’t be able to access this later. In this example we use `Account Admin`, but you can also use `Job Admin` instead for token permissions. + + + +## 6. Create a dbt Cloud job + +In your dbt Cloud account, create a job, paying special attention to the information in the bullets below. Additional information for creating a dbt Cloud job can be found [here](https://docs.getdbt.com/docs/get-started/getting-started/building-your-first-project/schedule-a-job). + +- Configure the job with the commands that you want to include when this job kicks off, as Airflow will be referring to the job’s configurations for this rather than being explicitly coded in the Airflow DAG. This job will run a set of commands rather than a single command. +- Ensure that the schedule is turned **off** since we’ll be using Airflow to kick things off. +- Once you hit `save` on the job, make sure you copy the URL and save it for referencing later. The URL will look similar to this: + +```html +https://cloud.getdbt.com/#/accounts/{account_id}/projects/{project_id}/jobs/{job_id}/ +``` + + diff --git a/website/docs/guides/orchestration/airflow-and-dbt-cloud/3-running-airflow-and-dbt-cloud.md b/website/docs/guides/orchestration/airflow-and-dbt-cloud/3-running-airflow-and-dbt-cloud.md new file mode 100644 index 00000000000..d6fd32bdba9 --- /dev/null +++ b/website/docs/guides/orchestration/airflow-and-dbt-cloud/3-running-airflow-and-dbt-cloud.md @@ -0,0 +1,104 @@ +--- +title: Running Airflow and dbt Cloud +id: 3-running-airflow-and-dbt-cloud +--- + + + +Now you have all the working pieces to get up and running with Airflow + dbt Cloud. Let’s dive into making this all work together. We will **set up a connection** and **run a DAG in Airflow** that kicks off a dbt Cloud job. + +## 1. Add your dbt Cloud API token as a secure connection + +1. Navigate to Admin and click on **Connections** + + ![Airflow connections menu](/img/guides/orchestration/airflow-and-dbt-cloud/airflow-connections-menu.png) + +2. Click on the `+` sign to add a new connection, then click on the drop down to search for the dbt Cloud Connection Type + + ![Create connection](/img/guides/orchestration/airflow-and-dbt-cloud/create-connection.png) + + ![Connection type](/img/guides/orchestration/airflow-and-dbt-cloud/connection-type.png) + +3. Add in your connection details and your default dbt Cloud account id. 
This is found in your dbt Cloud URL after the accounts route section (`/accounts/{YOUR_ACCOUNT_ID}`), for example the account with id 16173 would see this in their URL: `https://cloud.getdbt.com/#/accounts/16173/projects/36467/jobs/65767/` + +![Connection details entered for the dbt Cloud connection](https://lh3.googleusercontent.com/sRxe5xbv_LYhIKblc7eiY7AmByr1OibOac2_fIe54rpU3TBGwjMpdi_j0EPEFzM1_gNQXry7Jsm8aVw9wQBSNs1I6Cyzpvijaj0VGwSnmVf3OEV8Hv5EPOQHrwQgK2RhNBdyBxN2) + +## 2. Add your `job_id` and `account_id` config details to the Python file: [dbt_cloud_provider_eltml.py](https://github.com/sungchun12/airflow-dbt-cloud/blob/main/dags/dbt_cloud_provider_eltml.py) + +1. You’ll find these details within the dbt Cloud job URL, see the comments in the code snippet below for an example. + + ```python + # dbt Cloud Job URL: https://cloud.getdbt.com/#/accounts/16173/projects/36467/jobs/65767/ + # account_id: 16173 + # job_id: 65767 + + # line 28 + default_args={"dbt_cloud_conn_id": "dbt_cloud", "account_id": 16173}, + + trigger_dbt_cloud_job_run = DbtCloudRunJobOperator( + task_id="trigger_dbt_cloud_job_run", + job_id=65767, # line 39 + check_interval=10, + timeout=300, + ) + ``` + +2. Turn on the DAG and verify the job succeeded after running. Note: screenshots taken from different job runs, but the user experience is consistent. + + ![Airflow DAG run kicking off the dbt Cloud job](https://lh6.googleusercontent.com/p8AqQRy0UGVLjDGPmcuGYmQ_BRodyL0Zis-eQgSmp69EHbKW51o4S-bCl1fXHlOmwpYEBxD0A-O1Q1hwt-VDVMO1wWH-AIeaoelBx06JXRJ0m1OcHaPpFKH0xDiduIhNlQhhbLiy) + + ![Airflow DAG](/img/guides/orchestration/airflow-and-dbt-cloud/airflow-dag.png) + + ![Task run instance](/img/guides/orchestration/airflow-and-dbt-cloud/task-run-instance.png) + + ![dbt Cloud job run triggered by the Airflow DAG](https://lh6.googleusercontent.com/S9QdGhLAdioZ3x634CChugsJRiSVtTTd5CTXbRL8ADA6nSbAlNn4zV0jb3aC946c8SGi9FRTfyTFXqjcM-EBrJNK5hQ0HHAsR5Fj7NbdGoUfBI7xFmgeoPqnoYpjyZzRZlXkjtxS) + +## How do I rerun the dbt Cloud job and downstream tasks in my pipeline? + +If you have worked with dbt Cloud before, you have likely encountered cases where a job fails. In those cases, you have likely logged into dbt Cloud, investigated the error, and then manually restarted the job. + +This section of the guide will show you how to restart the job directly from Airflow. This will specifically run *just* the `trigger_dbt_cloud_job_run` and downstream tasks of the Airflow DAG and not the entire DAG. If only the transformation step fails, you don’t need to re-run the extract and load processes. Let’s jump into how to do that in Airflow. + +1. Click on the task + + ![Task DAG view](/img/guides/orchestration/airflow-and-dbt-cloud/task-dag-view.png) + +2. Clear the task instance + + ![Clear task instance](/img/guides/orchestration/airflow-and-dbt-cloud/clear-task-instance.png) + + ![Approve clearing](/img/guides/orchestration/airflow-and-dbt-cloud/approve-clearing.png) + +3. Watch it rerun in real time + + ![Re-run](/img/guides/orchestration/airflow-and-dbt-cloud/re-run.png) + +## Cleaning up + +At the end of this guide, make sure you shut down your Docker container. 
When you’re done using Airflow, use the following command to stop the container: + +```bash +$ astrocloud dev stop + +[+] Running 3/3 + ⠿ Container airflow-dbt-cloud_e3fe3c-webserver-1 Stopped 7.5s + ⠿ Container airflow-dbt-cloud_e3fe3c-scheduler-1 Stopped 3.3s + ⠿ Container airflow-dbt-cloud_e3fe3c-postgres-1 Stopped 0.3s +``` + +To verify that the deployment has stopped, use the following command: + +```bash +astrocloud dev ps +``` + +This should give you an output like this: + +```bash +Name State Ports +airflow-dbt-cloud_e3fe3c-webserver-1 exited +airflow-dbt-cloud_e3fe3c-scheduler-1 exited +airflow-dbt-cloud_e3fe3c-postgres-1 exited +``` + + diff --git a/website/docs/guides/orchestration/airflow-and-dbt-cloud/4-airflow-and-dbt-cloud-faqs.md b/website/docs/guides/orchestration/airflow-and-dbt-cloud/4-airflow-and-dbt-cloud-faqs.md new file mode 100644 index 00000000000..d6cecd1457d --- /dev/null +++ b/website/docs/guides/orchestration/airflow-and-dbt-cloud/4-airflow-and-dbt-cloud-faqs.md @@ -0,0 +1,50 @@ +--- +title: Airflow and dbt Cloud FAQs +id: 4-airflow-and-dbt-cloud-faqs +--- +## 1. How can we run specific subsections of the dbt DAG in Airflow? + +Because of the way we configured the dbt Cloud job to run in Airflow, you can leave this job to your analytics engineers to define in the job configurations from dbt Cloud. If, for example, we need to run hourly-tagged models every hour and daily-tagged models daily, we can create jobs like `Hourly Run` or `Daily Run` and utilize the commands `dbt run -s tag:hourly` and `dbt run -s tag:daily` within each, respectively. We only need to grab our dbt Cloud `account` and `job id`, configure it in an Airflow DAG with the code provided, and then we’re on our way. See more node selection options: [here](/reference/node-selection/syntax) + +## 2. How can I re-run models from the point of failure? + +You may want to parse the dbt DAG in Airflow to get the benefit of re-running from the point of failure. However, when you have hundreds of models in your DAG expanded out, it becomes useless for diagnosis and rerunning due to the overhead that comes along with creating an expansive Airflow DAG. + +You can’t re-run from failure natively in dbt Cloud today (feature coming!), but you can use a custom rerun parser. + +Using a simple Python script coupled with the dbt Cloud provider, you can: + +- Avoid managing artifacts in a separate storage bucket (dbt Cloud does this for you) +- Avoid building your own parsing logic +- Get clear logs on what models you're rerunning in dbt Cloud (without hard coding step override commands) + +Watch the video below to see how it works! + + + +## 3. Should Airflow run one big dbt job or many dbt jobs? + +Overall we recommend being as purposeful and minimalistic as you can. This is because dbt manages all of the dependencies between models and the orchestration of running those dependencies in order, which in turn has benefits in terms of warehouse processing efforts. + +## 4. We want to kick off our dbt jobs after our ingestion tool (such as Fivetran) / data pipelines are done loading data. Any best practices around that? + +Our friends at Astronomer answer this question with this example: [here](https://registry.astronomer.io/dags/fivetran-dbt-cloud-census) + +## 5. How do you set up a CI/CD workflow with Airflow? 
+ +Check out these two resources for accomplishing your own CI/CD pipeline: + +- [Continuous Integration with dbt Cloud](/docs/deploy/cloud-ci-job) +- [Astronomer's CI/CD Example](https://docs.astronomer.io/software/ci-cd/#example-cicd-workflow) + +## 6. Can dbt dynamically create tasks in the DAG like Airflow can? + +We prefer to keep models bundled vs. unbundled. You can go this route, but if you have hundreds of dbt models, it’s more effective to let the dbt Cloud job handle the models and dependencies. Bundling provides clear observability when things go wrong - we've seen more success in having the ability to clearly see issues in a bundled dbt Cloud job than combing through the nodes of an expansive Airflow DAG. If you still have a use case for this level of control though, our friends at Astronomer answer this question [here](https://www.astronomer.io/blog/airflow-dbt-1/)! + +## 7. Can you trigger notifications if a dbt job fails with Airflow? Is there any way to access the status of the dbt Job to do that? + +Yes, either through [Airflow's email/Slack](https://www.astronomer.io/guides/error-notifications-in-airflow/) functionality by itself or combined with [dbt Cloud's notifications](/docs/deploy/job-notifications), which support email and Slack notifications. + +## 8. Are there decision criteria for how to best work with dbt Cloud and Airflow? + +Check out this deep dive into planning your dbt Cloud + Airflow implementation [here](https://www.youtube.com/watch?v=n7IIThR8hGk)! diff --git a/website/docs/guides/orchestration/custom-cicd-pipelines/1-cicd-background.md b/website/docs/guides/orchestration/custom-cicd-pipelines/1-cicd-background.md new file mode 100644 index 00000000000..4dbcbb6c6c7 --- /dev/null +++ b/website/docs/guides/orchestration/custom-cicd-pipelines/1-cicd-background.md @@ -0,0 +1,43 @@ +--- +title: Customizing CI/CD +id: 1-cicd-background +--- + +# Creating Custom CI/CD Pipelines + +One of the core tenets of dbt is that analytic code should be version controlled. This provides a ton of benefit to your organization in terms of collaboration, code consistency, stability, and the ability to roll back to a prior version. There’s an additional benefit that is provided with your code hosting platform that is often overlooked or underutilized. Some of you may have experience using dbt Cloud’s [webhook functionality](https://docs.getdbt.com/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration) to run a job when a PR is created. This is a fantastic capability, and meets most use cases for testing your code before merging to production. However, there are circumstances when an organization needs additional functionality, like running workflows on every commit (linting), or running workflows after a merge is complete. In this article, we will show you how to set up custom pipelines to lint your project and trigger a dbt Cloud job via the API. + +A note on parlance in this article: each code hosting platform uses different terms for similar concepts. The terms `pull request` (PR) and `merge request` (MR) are used interchangeably to mean the process of merging one branch into another branch. + + +## What are pipelines? + +Pipelines (which are known by many names, such as workflows, actions, or build steps) are a series of pre-defined jobs that are triggered by specific events in your repository (PR created, commit pushed, branch merged, etc). 
Those jobs can do pretty much anything your heart desires assuming you have the proper security access and coding chops. + +Jobs are executed on [runners](https://docs.github.com/en/actions/learn-github-actions/understanding-github-actions#runners), which are virtual servers. The runners come pre-configured with Ubuntu Linux, macOS, or Windows. That means the commands you execute are determined by the operating system of your runner. You’ll see how this comes into play later in the setup, but for now just remember that your code is executed on virtual servers that are, typically, hosted by the code hosting platform. + +![Diagram of how pipelines work](/img/guides/orchestration/custom-cicd-pipelines/pipeline-diagram.png) + +Please note, runners hosted by your code hosting platform provide a certain amount of free time. After that, billing charges may apply depending on how your account is set up. You also have the ability to host your own runners. That is beyond the scope of this article, but check out the links below for more information if you’re interested in setting that up: + +- Repo-hosted runner billing information: + - [GitHub](https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions) + - [GitLab](https://docs.gitlab.com/ee/ci/pipelines/cicd_minutes.html) +- Self-hosted runner information: + - [GitHub](https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners) + - [GitLab](https://docs.gitlab.com/runner/) + +Additionally, if you’re using the free tier of GitLab you can still follow this guide, but it may ask you to provide a credit card to verify your account. You’ll see something like this the first time you try to run a pipeline: + +![Warning from GitLab showing payment information is required](/img/guides/orchestration/custom-cicd-pipelines/gitlab-cicd-payment-warning.png) + + +## How to set up pipelines + +This guide provides details for multiple code hosting platforms. Where steps are unique, they are presented without a selection option. If code is specific to a platform (i.e. GitHub, GitLab) you will see a selection option for each. + +Pipelines can be triggered by various events. The [dbt Cloud webhook](https://docs.getdbt.com/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration) process already triggers a run if you want to run your jobs on a merge request, so this guide focuses on running pipelines for every push and when PRs are merged. Since pushes happen frequently in a project, we’ll keep this job super simple and fast by linting with SQLFluff. The pipeline that runs on merge requests will run less frequently, and can be used to call the dbt Cloud API to trigger a specific job. This can be helpful if you have specific requirements that need to happen when code is updated in production, like running a `--full-refresh` on all impacted incremental models. 
+ +Here’s a quick look at what this pipeline will accomplish: + +![Diagram showing the pipelines to be created and the programs involved](/img/guides/orchestration/custom-cicd-pipelines/pipeline-programs-diagram.png) diff --git a/website/docs/guides/orchestration/custom-cicd-pipelines/2-lint-on-push.md b/website/docs/guides/orchestration/custom-cicd-pipelines/2-lint-on-push.md new file mode 100644 index 00000000000..5031ea50844 --- /dev/null +++ b/website/docs/guides/orchestration/custom-cicd-pipelines/2-lint-on-push.md @@ -0,0 +1,142 @@ +--- +title: Lint code on push +id: 2-lint-on-push +--- + +This section shows a very basic example of linting a project every time a commit is pushed to the repo. While it is simple, it shows the power of CI and can be expanded on to meet the needs of your organization. + +The steps below use [SQLFluff](https://docs.sqlfluff.com/en/stable/) to scan your code and look for linting errors. In the example, it's set to use the `snowflake` dialect, and specifically runs the rules L019, L020, L021, and L022. This is purely for demonstration purposes. You should update this to reflect your code base's [dialect](https://docs.sqlfluff.com/en/stable/dialects.html) and the [rules](https://docs.sqlfluff.com/en/stable/rules.html) you've established for your repo. + +### 1. Create a yaml file to define your pipeline + +The yaml files defined below are what tell your code hosting platform the steps to run. In this setup, you’re telling the platform to run a SQLFluff lint job every time a commit is pushed. + + + + +In order for GitHub to know that you want to run an action, you need to have a few specific folders in your project. Add a new folder named `.github`, and within that folder add a new one named `workflows`. Your final folder structure will look like this: + +``` +my_awesome_project +├── .github +│ ├── workflows +│ │ └── lint_on_push.yml +``` + +To define the job for our action, let’s add a new file named `lint_on_push.yml` under the `workflows` folder. This file is how we tell the GitHub runner what to execute when the job is triggered. + +Below I touch on the important pieces of this yaml file, but if you want a full run-down of all its components, check out [this GitHub article](https://docs.github.com/en/actions/learn-github-actions/understanding-github-actions#understanding-the-workflow-file) on actions. + +**Key pieces:** + +- `on:` - this is used to filter when the pipeline is run. In this example we’re running it on every push except for pushes to branches named `main`. For more filters, check out [GitHub’s docs](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows). +- `runs-on: ubuntu-latest` - this defines the operating system we’re using to run the job +- `uses:` - remember the virtual servers we covered in the background section? They’re just empty operating systems, so there are two pieces of setup that are needed in order to access the code in your repo, and set up Python correctly on the virtual server. These two actions are called from other repos in GitHub to provide those services. For more information on them, check out their repos: [actions/checkout](https://github.com/actions/checkout#checkout-v3) and [actions/setup-python](https://github.com/actions/setup-python#setup-python-v3). +- `run:` - this is how we’re telling the GitHub runner to execute the linting commands for each step of the job. 
+ +```yaml +name: lint dbt project on push + +on: + push: + branches-ignore: + - 'main' + +jobs: +# this job runs SQLFluff with a specific set of rules + # note the dialect is set to Snowflake, so make that specific to your setup + # details on linter rules: https://docs.sqlfluff.com/en/stable/rules.html + lint_project: + name: Run SQLFluff linter + runs-on: ubuntu-latest + + steps: + - uses: "actions/checkout@v3" + - uses: "actions/setup-python@v2" + with: + python-version: "3.9" + - name: Install SQLFluff + run: "pip install sqlfluff==0.13.1" + - name: Lint project + run: "sqlfluff lint models --dialect snowflake --rules L019,L020,L021,L022" + +``` + + + + +Create a `.gitlab-ci.yml` file in your **root directory** to define the triggers for when to execute the linting job. You’ll put the code below into this file. + +``` +my_awesome_project +├── dbt_project.yml +├── .gitlab-ci.yml +``` + +**Key pieces:** + +- `image: python:3.9` - this defines the virtual image we’re using to run the job +- `rules:` - this is used to filter when the pipeline runs. In this case we’re telling it to run on every push event except when the branch is named `main`. Filters are very powerful to run commands on specific events, and you can find a full list in [GitLab’s documentation](https://docs.gitlab.com/ee/ci/yaml/#rules). +- `script:` - this is how we’re telling the GitLab runner to execute the linting commands for this job. + +```yaml +image: python:3.9 + +stages: + - pre-build + +# this job runs SQLFluff with a specific set of rules +# note the dialect is set to Snowflake, so make that specific to your setup +# details on linter rules: https://docs.sqlfluff.com/en/stable/rules.html +lint-project: + stage: pre-build + rules: + - if: $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH != 'main' + script: + - pip install sqlfluff==0.13.1 + - sqlfluff lint models --dialect snowflake --rules L019,L020,L021,L022 +``` + + + + +### 2. Commit and push your changes to make sure everything works + +After you finish creating the yaml files, commit and push your code. Doing this will trigger your pipeline for the first time! If everything goes well, you should see the pipeline in your code platform. When you click into the job, you’ll get a log showing that SQLFluff was run. If your code failed linting, you’ll get an error in the job with a description of what needs to be fixed. If everything passed the lint check, you’ll see a successful job run. 
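As a side note, you can reproduce the same check locally before pushing; the commands below mirror the pipeline steps above (assuming you have Python available on your machine):

```bash
# install the same pinned version of SQLFluff the pipeline uses
pip install sqlfluff==0.13.1

# lint the models folder with the same dialect and rules
sqlfluff lint models --dialect snowflake --rules L019,L020,L021,L022
```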
+ + + +In your repository, click the *Actions* tab + +![Image showing the GitHub action for lint on push](/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-github.png) + +Sample output from SQLFluff in the `Run SQLFluff linter` job: + +![Image showing the logs in GitHub for the SQLFluff run](/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-logs-github.png) + + + + +In the menu, go to *CI/CD > Pipelines* + +![Image showing the GitLab action for lint on push](/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-gitlab.png) + +Sample output from SQLFluff in the `Run SQLFluff linter` job: + +![Image showing the logs in GitLab for the SQLFluff run](/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-logs-gitlab.png) + + + diff --git a/website/docs/guides/orchestration/custom-cicd-pipelines/3-dbt-cloud-job-on-merge.md b/website/docs/guides/orchestration/custom-cicd-pipelines/3-dbt-cloud-job-on-merge.md new file mode 100644 index 00000000000..042e6ea3d61 --- /dev/null +++ b/website/docs/guides/orchestration/custom-cicd-pipelines/3-dbt-cloud-job-on-merge.md @@ -0,0 +1,295 @@ +--- +title: Run a dbt Cloud job on merge +id: 3-dbt-cloud-job-on-merge +--- + +This job will take a bit more to set up, but is a good example of how to call the dbt Cloud API from a CI/CD pipeline. The concepts presented here can be generalized and used in whatever way best suits your use case. + +The setup below shows how to call the dbt Cloud API to run a job every time there is a push to your main branch (i.e. a PR is merged). + + +### 1. Get your dbt Cloud API key + +When running a CI/CD pipeline you’ll want to use a service token instead of any individual’s API key. There are [detailed docs](https://docs.getdbt.com/docs/dbt-cloud-apis/service-tokens) available on this, but below is a quick rundown (this must be performed by an Account Admin): + +- Log in to your dbt Cloud account +- In the upper left, click the menu button, then *Account Settings* +- Click *Service Tokens* on the left +- Click *New Token* to create a new token specifically for CI/CD API calls +- Name your token something like “CICD Token” +- Click the *+Add* button under *Access*, and grant this token the *Job Admin* permission +- Click *Save* and you’ll see a grey box appear with your token. Copy that and save it somewhere safe (this is a password, and should be treated as such). + +![View of the dbt Cloud page where service tokens are created](/img/guides/orchestration/custom-cicd-pipelines/dbt-service-token-page.png) + +Here’s a video showing the steps as well: + + + +### 2. Put your dbt Cloud API key into your repo + +This next part will happen in your code hosting platform. We need to save your API key from above into a repository secret so the job we create can access it. It is **not** recommended to ever save passwords or API keys in your code, so this step ensures that your key stays secure, but is still usable for your pipelines. 
+
+
+
+
+In GitHub:
+
+- Open up your repository where you want to run the pipeline (the same one that houses your dbt project)
+- Click *Settings* to open up the repository options
+- On the left click the *Security* dropdown
+- From that list, click on *Actions*
+- Towards the middle of the screen, click the *New repository secret* button
+- It will ask you for a name, so let’s call ours `DBT_API_KEY`
+  - **It’s very important that you copy/paste this name exactly because it’s used in the scripts below.**
+- In the *Value* section, paste in the key you copied from dbt Cloud
+- Click *Add secret* and you’re all set!
+
+*A quick note on security: while using a repository secret is the most straightforward way to set up this secret, there are other options available to you in GitHub. They’re beyond the scope of this guide, but could be helpful if you need to create a more secure environment for running actions. Check out GitHub’s documentation on secrets [here](https://docs.github.com/en/actions/security-guides/encrypted-secrets).*
+
+Here’s a video showing these steps:
+
+
+
+
+
+In GitLab:
+
+- Open up your repository where you want to run the pipeline (the same one that houses your dbt project)
+- Click *Settings* > *CI/CD*
+- Under the *Variables* section, click *Expand*, then click *Add variable*
+- It will ask you for a name, so let’s call ours `DBT_API_KEY`
+  - **It’s very important that you copy/paste this name exactly because it’s used in the scripts below.**
+- In the *Value* section, paste in the key you copied from dbt Cloud
+- Make sure the check box next to *Protect variable* is unchecked, and the box next to *Mask variable* is selected (see below)
+  - “Protected” means that the variable is only available in pipelines that run on protected branches or protected tags - that won’t work for us because we want to run this pipeline on multiple branches. “Masked” means that it will be available to your pipeline runner, but will be masked in the logs.
+
+  ![View of the GitLab window for entering DBT_API_KEY](/img/guides/orchestration/custom-cicd-pipelines/dbt-api-key-gitlab.png)
+
+  Here’s a video showing these steps:
+
+
+
+
+
+
+### 3. Create script to trigger dbt Cloud job via an API call
+
+In your dbt Cloud project, create a new folder at the root level named `python`. In that folder, create a file named `run_and_monitor_dbt_job.py`. You’ll copy/paste the contents from this [gist](https://gist.github.com/b-per/f4942acb8584638e3be363cb87769b48) into that file.
+
+```
+my_awesome_project
+├── python
+│   └── run_and_monitor_dbt_job.py
+```
+
+This Python file has everything you need to call the dbt Cloud API, but requires a few inputs (see snippet below). Those inputs are fed to this script through environment variables that will be defined in the next step.
+
+```python
+#------------------------------------------------------------------------------
+# get environment variables
+#------------------------------------------------------------------------------
+api_base = os.getenv('DBT_URL', 'https://cloud.getdbt.com/') # default to multitenant url
+job_cause = os.getenv('DBT_JOB_CAUSE', 'API-triggered job') # default to generic message
+git_branch = os.getenv('DBT_JOB_BRANCH', None) # default to None
+schema_override = os.getenv('DBT_JOB_SCHEMA_OVERRIDE', None) # default to None
+api_key = os.environ['DBT_API_KEY'] # no default here, just throw an error if key not provided
+account_id = os.environ['DBT_ACCOUNT_ID'] # no default here, just throw an error if id not provided
+project_id = os.environ['DBT_PROJECT_ID'] # no default here, just throw an error if id not provided
+job_id = os.environ['DBT_PR_JOB_ID'] # no default here, just throw an error if id not provided
+```
+
+**Required input:**
+
+In order to call the dbt Cloud API, there are a few pieces of info the script needs. The easiest way to get these values is to open up the job you want to run in dbt Cloud. The URL when you’re inside the job has all the values you need:
+
+- `DBT_ACCOUNT_ID` - this is the number just after `accounts/` in the URL
+- `DBT_PROJECT_ID` - this is the number just after `projects/` in the URL
+- `DBT_PR_JOB_ID` - this is the number just after `jobs/` in the URL
+
+![Image of a dbt Cloud job URL with the pieces for account, project, and job highlighted](/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-url.png)
+
+### 4. Update your project to include the new API call
+
+
+
+
+For this new job, we’ll add a file for the dbt Cloud API call named `dbt_run_on_merge.yml`.
+
+```
+my_awesome_project
+├── python
+│   └── run_and_monitor_dbt_job.py
+├── .github
+│   ├── workflows
+│   │   ├── dbt_run_on_merge.yml
+│   │   └── lint_on_push.yml
+```
+
+The yaml file will look pretty similar to our earlier job, but there is a new section called `env` that we’ll use to pass in the required variables. Update the variables below to match your setup based on the comments in the file.
+
+It’s worth noting that we changed the `on:` section to now run **only** when there are pushes to a branch named `main` (i.e. a PR is merged). Have a look through [GitHub’s docs](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows) on these filters for additional use cases.
+
+```yaml
+name: run dbt Cloud job on push
+
+# This filter says only run this job when there is a push to the main branch
+# This works off the assumption that you've restricted this branch so that only PRs can be merged into the default branch
+# Update the name to match the name of your default branch
+on:
+  push:
+    branches:
+      - 'main'
+
+jobs:
+
+  # the job calls the dbt Cloud API to run a job
+  run_dbt_cloud_job:
+    name: Run dbt Cloud Job
+    runs-on: ubuntu-latest
+
+    # Set the environment variables needed for the run
+    env:
+      DBT_ACCOUNT_ID: 00000 # enter your account id
+      DBT_PROJECT_ID: 00000 # enter your project id
+      DBT_PR_JOB_ID: 00000 # enter your job id
+      DBT_API_KEY: ${{ secrets.DBT_API_KEY }}
+      DBT_JOB_CAUSE: 'GitHub Pipeline CI Job'
+      DBT_JOB_BRANCH: ${{ github.ref_name }}
+
+    steps:
+      - uses: "actions/checkout@v3"
+      - uses: "actions/setup-python@v2"
+        with:
+          python-version: "3.9"
+      - name: Run dbt Cloud job
+        run: "python python/run_and_monitor_dbt_job.py"
+```
+
+
+
+
+For this job, we'll set it up using the `.gitlab-ci.yml` file as in the prior step (see Step 1 of the linting setup for more info). The yaml file will look pretty similar to our earlier job, but there is a new section called `variables` that we’ll use to pass in the required variables to the Python script. Update this section to match your setup based on the comments in the file.
+
+Please note that the `rules:` section now says to run **only** when there are pushes to a branch named `main` (i.e. a PR is merged). Have a look through [GitLab’s docs](https://docs.gitlab.com/ee/ci/yaml/#rules) on these filters for additional use cases.
+
+
+
+
+```yaml
+image: python:3.9
+
+variables:
+  DBT_ACCOUNT_ID: 00000 # enter your account id
+  DBT_PROJECT_ID: 00000 # enter your project id
+  DBT_PR_JOB_ID: 00000 # enter your job id
+  DBT_API_KEY: $DBT_API_KEY # secret variable in gitlab account
+  DBT_URL: https://cloud.getdbt.com
+  DBT_JOB_CAUSE: 'GitLab Pipeline CI Job'
+  DBT_JOB_BRANCH: $CI_COMMIT_BRANCH
+
+stages:
+  - build
+
+# this job calls the dbt Cloud API to run a job
+run-dbt-cloud-job:
+  stage: build
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == 'main'
+  script:
+    - python python/run_and_monitor_dbt_job.py
+```
+
+
+
+
+```yaml
+image: python:3.9
+
+variables:
+  DBT_ACCOUNT_ID: 00000 # enter your account id
+  DBT_PROJECT_ID: 00000 # enter your project id
+  DBT_PR_JOB_ID: 00000 # enter your job id
+  DBT_API_KEY: $DBT_API_KEY # secret variable in gitlab account
+  DBT_URL: https://cloud.getdbt.com
+  DBT_JOB_CAUSE: 'GitLab Pipeline CI Job'
+  DBT_JOB_BRANCH: $CI_COMMIT_BRANCH
+
+stages:
+  - pre-build
+  - build
+
+# this job runs SQLFluff with a specific set of rules
+# note the dialect is set to Snowflake, so make that specific to your setup
+# details on linter rules: https://docs.sqlfluff.com/en/stable/rules.html
+lint-project:
+  stage: pre-build
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH != 'main'
+  script:
+    - pip install sqlfluff==0.13.1
+    - sqlfluff lint models --dialect snowflake --rules L019,L020,L021,L022
+
+# this job calls the dbt Cloud API to run a job
+run-dbt-cloud-job:
+  stage: build
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == 'main'
+  script:
+    - python python/run_and_monitor_dbt_job.py
+```
+
+
+
+
+
+
+
+
+### 5. Test your new action
+
+Now that you have a shiny new action, it’s time to test it out!
Since this change is set up to only run on merges to your default branch, you’ll need to create and merge this change into your main branch. Once you do that, you’ll see a new pipeline job has been triggered to run the dbt Cloud job you assigned in the variables section.
+
+Additionally, you’ll see the job in the run history of dbt Cloud. It should be fairly easy to spot because it will say it was triggered by the API, and the *INFO* section will have the branch you used for this guide.
+
+
+
+
+![dbt run on merge job in GitHub](/img/guides/orchestration/custom-cicd-pipelines/dbt-run-on-merge-github.png)
+
+![dbt Cloud job showing it was triggered by GitHub](/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-github-triggered.png)
+
+
+
+
+![dbt run on merge job in GitLab](/img/guides/orchestration/custom-cicd-pipelines/dbt-run-on-merge-gitlab.png)
+
+![dbt Cloud job showing it was triggered by GitLab](/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-gitlab-triggered.png)
+
+
+
diff --git a/website/docs/guides/orchestration/custom-cicd-pipelines/4-something-to-consider.md b/website/docs/guides/orchestration/custom-cicd-pipelines/4-something-to-consider.md
new file mode 100644
index 00000000000..180ab35d44f
--- /dev/null
+++ b/website/docs/guides/orchestration/custom-cicd-pipelines/4-something-to-consider.md
@@ -0,0 +1,8 @@
+---
+title: Something to Consider
+id: 4-something-to-consider
+---
+
+Running dbt Cloud jobs through a CI/CD pipeline is a form of job orchestration. If you also run jobs using dbt Cloud’s built-in scheduler, you now have two orchestration tools running jobs, and you risk conflicts: for example, if you trigger a pipeline on certain actions while also running scheduled jobs in dbt Cloud, the two will likely clash. The more tools you have, the more you have to make sure everything talks to each other.
+
+That being said, if **the only reason you want to use pipelines is for adding a lint check or a run on merge**, you might decide the pros outweigh the cons, and as such you want to go with a hybrid approach. Just keep in mind that if two processes try to run the same job at the same time, dbt Cloud will queue the jobs and run one after the other. It’s a balancing act, but with some diligence you can ensure your jobs are orchestrated in a manner that does not conflict.
\ No newline at end of file
diff --git a/website/docs/reference/analysis-properties.md b/website/docs/reference/analysis-properties.md
index 1fc565a8edc..e2dce1a7920 100644
--- a/website/docs/reference/analysis-properties.md
+++ b/website/docs/reference/analysis-properties.md
@@ -14,7 +14,7 @@ version: 2
 analyses:
   - name: # required
     [description](description):
-    [docs](resource-properties/docs):
+    [docs](/reference/resource-configs/docs):
       show: true | false
     config:
       [tags](resource-configs/tags): <string> | [<string>]
diff --git a/website/docs/reference/artifacts/catalog-json.md b/website/docs/reference/artifacts/catalog-json.md
index 2de1e1dff3d..d5788f6f250 100644
--- a/website/docs/reference/artifacts/catalog-json.md
+++ b/website/docs/reference/artifacts/catalog-json.md
@@ -6,7 +6,7 @@
 _Current schema_: [`v1`](https://schemas.getdbt.com/dbt/catalog/v1.json)
 
 _Produced by:_ `dbt docs generate`
 
-This file contains information from your data warehouse about the tables and views produced and defined by the resources in your project.
Today, dbt uses this file to populate metadata, such as column types and statistics, in the [docs site](documentation).
+This file contains information from your data warehouse about the tables and views produced and defined by the resources in your project. Today, dbt uses this file to populate metadata, such as column types and statistics, in the [docs site](documentation).
 
 ### Top-level keys
diff --git a/website/docs/reference/artifacts/dbt-artifacts.md b/website/docs/reference/artifacts/dbt-artifacts.md
index 385d7b12ab5..bc93aa8cf6e 100644
--- a/website/docs/reference/artifacts/dbt-artifacts.md
+++ b/website/docs/reference/artifacts/dbt-artifacts.md
@@ -2,10 +2,10 @@
 title: Overview
 ---
 
-With every invocation, dbt generates and saves one or more *artifacts*. Several of these are JSON files (`manifest.json`, `catalog.json`, `run_results.json`, and `sources.json`) that are used to power:
+With every invocation, dbt generates and saves one or more *artifacts*. Several of these are JSON files (`manifest.json`, `catalog.json`, `run_results.json`, and `sources.json`) that are used to power:
 - [documentation](documentation)
 - [state](understanding-state)
-- [visualizing source freshness](cloud-snapshotting-source-freshness)
+- [visualizing source freshness](/docs/build/sources#snapshotting-source-data-freshness)
 
 They could also be used to:
 - calculate project-level test coverage
@@ -43,9 +43,4 @@ In the manifest, the `metadata` may also include:
 #### Notes:
 - The structure of dbt artifacts is canonized by [JSON schemas](https://json-schema.org/), which are hosted at **schemas.getdbt.com**.
-- As of v0.20.0, the current schema for each artifact is:
-  - https://schemas.getdbt.com/dbt/manifest/v4.json
-  - https://schemas.getdbt.com/dbt/run-results/v4.json
-  - https://schemas.getdbt.com/dbt/catalog/v1.json
-  - https://schemas.getdbt.com/dbt/sources/v3.json
-- Artifact versions may change in any minor version of dbt (`v0.x.0`). Each artifact is versioned independently.
+- Artifact versions may change in any minor version of dbt (`v1.x.0`). Each artifact is versioned independently.
diff --git a/website/docs/reference/artifacts/manifest-json.md b/website/docs/reference/artifacts/manifest-json.md
index f54125c901e..21faaab44c6 100644
--- a/website/docs/reference/artifacts/manifest-json.md
+++ b/website/docs/reference/artifacts/manifest-json.md
@@ -2,7 +2,7 @@
 title: Manifest
 ---
 
-_Current schema_: [`v4`](https://schemas.getdbt.com/dbt/manifest/v4/index.html)
+_Current schema_: [`v7`](https://schemas.getdbt.com/dbt/manifest/v7/index.html)
 
 _Produced by:_
 - `dbt compile`
diff --git a/website/docs/reference/artifacts/run-results-json.md b/website/docs/reference/artifacts/run-results-json.md
index 9e0d27f0da5..f2ded4f23dc 100644
--- a/website/docs/reference/artifacts/run-results-json.md
+++ b/website/docs/reference/artifacts/run-results-json.md
@@ -22,7 +22,7 @@ Note: `dbt source freshness` produces a different artifact, [`sources.json`](sou
 ### Top-level keys
 - [`metadata`](dbt-artifacts#common-metadata)
-- `args`: Dictionary of arguments passed to the CLI command or RPC method that produced this artifact. Most useful is `which` (command) or `rpc_method`. This dict excludes null values, and includes default values if they are not null.
+- `args`: Dictionary of arguments passed to the CLI command or RPC method that produced this artifact. Most useful is `which` (command) or `rpc_method`. This dict excludes null values, and includes default values if they are not null.
Equivalent to [`invocation_args_dict`](flags#invocation_args_dict) in the dbt-Jinja context.
 - `elapsed_time`: Total invocation time in seconds.
 - `results`: Array of node execution details.
diff --git a/website/docs/reference/artifacts/sources-json.md b/website/docs/reference/artifacts/sources-json.md
index 678fc19aa9c..8d36d9d2e1e 100644
--- a/website/docs/reference/artifacts/sources-json.md
+++ b/website/docs/reference/artifacts/sources-json.md
@@ -6,7 +6,7 @@
 _Current schema_: [`v3`](https://schemas.getdbt.com/dbt/sources/v3/index.html)
 
 _Produced by:_ `dbt source freshness`
 
-This file contains information about [sources with freshness checks](using-sources#snapshotting-source-data-freshness). Today, dbt Cloud uses this file to power its [Source Freshness visualization](cloud-snapshotting-source-freshness).
+This file contains information about [sources with freshness checks](/docs/build/sources#checking-source-freshness). Today, dbt Cloud uses this file to power its [Source Freshness visualization](/docs/build/sources#snapshotting-source-data-freshness).
 
 ### Top-level keys
diff --git a/website/docs/reference/commands/clean.md b/website/docs/reference/commands/clean.md
index e65b4ac671c..11b9b18ceaa 100644
--- a/website/docs/reference/commands/clean.md
+++ b/website/docs/reference/commands/clean.md
@@ -11,4 +11,4 @@ id: "clean"
 
 `dbt clean` is a utility function that deletes all folders specified in the `clean-targets` list specified in `dbt_project.yml`. You can use this to delete the `dbt_packages` and `target` directories.
 
-To avoid complex permissions issues and potentially deleting crucial aspects of the remote file system without access to fix them, this command does not work when interfacing with the RPC server that powers the dbt Cloud IDE. Instead, when working in dbt Cloud, the `dbt deps` command cleans before it installs pacakges automatically. The `target` folder can be manually deleted from the sidbear file tree if needed.
+To avoid complex permissions issues and potentially deleting crucial aspects of the remote file system without access to fix them, this command does not work when interfacing with the RPC server that powers the dbt Cloud IDE. Instead, when working in dbt Cloud, the `dbt deps` command cleans before it installs packages automatically. The `target` folder can be manually deleted from the sidebar file tree if needed.
diff --git a/website/docs/reference/commands/cmd-docs.md b/website/docs/reference/commands/cmd-docs.md
index d9032c29b91..33a46582bad 100644
--- a/website/docs/reference/commands/cmd-docs.md
+++ b/website/docs/reference/commands/cmd-docs.md
@@ -26,13 +26,14 @@ dbt docs generate --no-compile
 ```
 
 ### dbt docs serve
-This command starts a webserver on port 8000 to serve your documentation locally. The webserver is rooted in your `target/` directory. Be sure to run `dbt docs generate` before `dbt docs serve` because the `generate` command produces a [catalog metadata artifact](/reference/artifacts/catalog-json) that the `serve` command depends upon. You will see an error message if the catalog is missing.
+This command starts a webserver on port 8000 to serve your documentation locally and opens the documentation site in your default browser. The webserver is rooted in your `target/` directory. Be sure to run `dbt docs generate` before `dbt docs serve` because the `generate` command produces a [catalog metadata artifact](/reference/artifacts/catalog-json) that the `serve` command depends upon. You will see an error message if the catalog is missing.
**Usage:**
 ```
 dbt docs serve [--profiles-dir PROFILES_DIR]
 [--profile PROFILE]
 [--target TARGET]
 [--port PORT]
+ [--no-browser]
 ```
 
 You may specify a different port using the `--port` flag.
diff --git a/website/docs/reference/commands/deps.md b/website/docs/reference/commands/deps.md
index 4966044d648..a52f07bb934 100644
--- a/website/docs/reference/commands/deps.md
+++ b/website/docs/reference/commands/deps.md
@@ -3,7 +3,7 @@ title: "deps"
 id: "deps"
 ---
 
-`dbt deps` pulls the most recent version of the dependencies listed in your `packages.yml` from git. See [Package-Management](package-management) for more information.
+`dbt deps` pulls the most recent version of the dependencies listed in your `packages.yml` from git. See [Package-Management](/docs/build/packages) for more information.
 
@@ -19,8 +19,8 @@ Where relevant, dbt will display up to date and/or latest versions of packages t
 packages:
   - package: dbt-labs/dbt_utils
     version: 0.7.1
-  - package: tailsdotcom/dbt_artifacts
-    version: 0.5.0-a1
+  - package: brooklyn-data/dbt_artifacts
+    version: 1.2.0
     install-prerelease: true
   - package: dbt-labs/codegen
     version: 0.4.0
@@ -39,9 +39,8 @@ packages:
 Installing dbt-labs/dbt_utils@0.7.1
   Installed from version 0.7.1
   Up to date!
-Installing tailsdotcom/dbt_artifacts@0.5.0a1
-  Installed from version 0.5.0a1
-  Updated version available: 0.5.0
+Installing brooklyn-data/dbt_artifacts@1.2.0
+  Installed from version 1.2.0
 Installing dbt-labs/codegen@0.4.0
   Installed from version 0.4.0
   Up to date!
diff --git a/website/docs/reference/commands/init.md b/website/docs/reference/commands/init.md
index 95eb6d3fff7..c2f67e836cd 100644
--- a/website/docs/reference/commands/init.md
+++ b/website/docs/reference/commands/init.md
@@ -13,12 +13,12 @@ The `init` command is interactive and responsive like never before.
 
 If this is your first time ever using the tool, it will:
 - ask you to name your project
-- ask you which database adapter you're using (or to [install the one you need](available-adapters))
+- ask you which database adapter you're using (or point you to [Supported Data Platforms](supported-data-platforms) to install the one you need)
 - prompt you for each piece of information that dbt needs to connect to that database: things like `account`, `user`, `password`, etc
 
 Then, it will:
 - Create a new folder with your project name and sample files, enough to get you started with dbt
-- Create a connection profile on your local machine. The default location is `~/.dbt/profiles.yml`. Read more in [configuring your profile](configure-your-profile).
+- Create a connection profile on your local machine. The default location is `~/.dbt/profiles.yml`. Read more in [configuring your profile](/docs/get-started/connection-profiles).
 
 ## Existing project
 
@@ -28,7 +28,7 @@ If you've just cloned or downloaded an existing dbt project, `dbt init` can still
 
 `dbt init` knows how to prompt for connection information by looking for a file named `profile_template.yml`. It will look for this file in two places:
 
-- **Adapter plugin:** What's the bare minumum Postgres profile? What's the type of each field, what are its defaults? This information is stored in a file called [`dbt/include/postgres/profile_template.yml`](https://github.com/dbt-labs/dbt-core/blob/main/plugins/postgres/dbt/include/postgres/profile_template.yml). If you're the maintainer of an adapter plugin, we highly recommend that you add a `profile_template.yml` to your plugin, too. See more details in [building-a-new-adapter](building-a-new-adapter).
+- **Adapter plugin:** What's the bare minimum Postgres profile? What's the type of each field, what are its defaults? This information is stored in a file called [`dbt/include/postgres/profile_template.yml`](https://github.com/dbt-labs/dbt-core/blob/main/plugins/postgres/dbt/include/postgres/profile_template.yml). If you're the maintainer of an adapter plugin, we highly recommend that you add a `profile_template.yml` to your plugin, too. See more details in [building-a-new-adapter](/guides/advanced/adapter-development/3-building-a-new-adapter).
 
 - **Existing project:** If you're the maintainer of an existing project, and you want to help new users get connected to your database quickly and easily, you can include your own custom `profile_template.yml` in the root of your project, alongside `dbt_project.yml`. For common connection attributes, set the values in `fixed`; leave user-specific attributes in `prompts`, but with custom hints and defaults as you'd like.
 
@@ -65,4 +65,4 @@ user (yourname@jaffleshop.com): summerintern@jaffleshop.com
 schema (usually dbt_<yourname>): dbt_summerintern
 threads (your favorite number, 1-10) [8]: 6
 Profile internal-snowflake written to /Users/intern/.dbt/profiles.yml using project's profile_template.yml and your supplied values. Run 'dbt debug' to validate the connection.
-```
\ No newline at end of file
+```
diff --git a/website/docs/reference/commands/list.md b/website/docs/reference/commands/list.md
index 1d4634dc706..745c12d3862 100644
--- a/website/docs/reference/commands/list.md
+++ b/website/docs/reference/commands/list.md
@@ -5,7 +5,7 @@ id: "list"
 
 ## Overview
 
-The `dbt ls` command lists resources in your dbt project. It accepts selector arguments that are similar to those provided in [dbt run](run). `dbt list` is an alias for `dbt ls`.
+The `dbt ls` command lists resources in your dbt project. It accepts selector arguments that are similar to those provided in [dbt run](run). `dbt list` is an alias for `dbt ls`. While `dbt ls` will read your [connection profile](/docs/get-started/connection-profiles) to resolve [`target`](dbt-jinja-functions/target)-specific logic, this command will not connect to your database or run any queries.
 
 ### Usage
 ```
diff --git a/website/docs/reference/commands/rpc.md b/website/docs/reference/commands/rpc.md
index 5558bfa721b..e2d7c448956 100644
--- a/website/docs/reference/commands/rpc.md
+++ b/website/docs/reference/commands/rpc.md
@@ -238,7 +238,7 @@ All RPC requests accept the following parameters in addition to the parameters l
 ```
 
 Several of the following request types accept these additional parameters:
-- `threads`: The number of [threads](configure-your-profile#understanding-threads) to use when compiling (optional)
+- `threads`: The number of [threads](/docs/get-started/connection-profiles#understanding-threads) to use when compiling (optional)
 - `select`: The space-delimited set of resources to execute (optional). (`models` is also supported on some request types for backwards compatibility.)
- `selector`: The name of a predefined [YAML selector](node-selection/yaml-selectors) that defines the set of resources to execute (optional)
 - `exclude`: The space-delimited set of resources to exclude from compiling, running, testing, seeding, or snapshotting (optional)
 
@@ -326,7 +326,7 @@ Several of the following request types accept these additional parameters:
 }
 ```
 
-### Run snapshots ([docs](seed))
+### Run snapshots ([docs](/docs/build/snapshots))
 
 ```json
 {
diff --git a/website/docs/reference/commands/run.md b/website/docs/reference/commands/run.md
index f69ced4a4f6..0b775a157f0 100644
--- a/website/docs/reference/commands/run.md
+++ b/website/docs/reference/commands/run.md
@@ -21,7 +21,7 @@ support transactions.
 
 ## Refresh incremental models
 
-If you provide the `--full-refresh` argument to `dbt run`, dbt will treat incremental models as table models. This is useful when
+If you provide the `--full-refresh` flag to `dbt run`, dbt will treat incremental models as table models. This is useful when
 
 1. The schema of an incremental model changes and you need to recreate it.
 2. You want to reprocess the entirety of the incremental model because of new logic in the model code.
@@ -34,6 +34,12 @@ dbt run --full-refresh
 
+
+
+You can also supply the flag by its short name: `dbt run -f`.
+
+
+
 In the dbt compilation context, this flag will be available as [flags.FULL_REFRESH](flags). Further, the `is_incremental()` macro will return `false` for *all* models in response when the `--full-refresh` flag is specified.
diff --git a/website/docs/reference/commands/seed.md b/website/docs/reference/commands/seed.md
index 3a0227d8cbd..e60ceced0d3 100644
--- a/website/docs/reference/commands/seed.md
+++ b/website/docs/reference/commands/seed.md
@@ -10,7 +10,7 @@ id: "seed"
 
-The `dbt seed` command will load `csv` files located in the `seed-paths` directory of your dbt project into your data warehouse.
+The `dbt seed` command will load `csv` files located in the `seed-paths` directory of your dbt project into your data warehouse.
 
 ### Selecting seeds to run
diff --git a/website/docs/reference/commands/source.md b/website/docs/reference/commands/source.md
index acf2bec9392..230b92669a1 100644
--- a/website/docs/reference/commands/source.md
+++ b/website/docs/reference/commands/source.md
@@ -17,7 +17,7 @@
 
 If you're using an older version of dbt Core (before v0.21), the old name of the
 
-If your dbt project is [configured with sources](using-sources), then the `dbt source freshness` command will query all of your defined source tables, determining the "freshness" of these tables. If the tables are stale (based on the `freshness` config specified for your sources) then dbt will report a warning or error accordingly. If a source is in a stale state, then dbt will exit with a nonzero exit code.
+If your dbt project is [configured with sources](/docs/build/sources), then the `dbt source freshness` command will query all of your defined source tables, determining the "freshness" of these tables. If the tables are stale (based on the `freshness` config specified for your sources) then dbt will report a warning or error accordingly. If a source is in a stale state, then dbt will exit with a nonzero exit code.
 
 ### Specifying sources to snapshot
 
@@ -39,7 +39,7 @@ $ dbt source freshness --select source:snowplow.event
 
 ### Configuring source freshness output
 
-When `dbt source freshness` completes, a JSON file containing information about the freshness of your sources will be saved to `target/sources.json`.
An example `sources.json` will look like:
+When `dbt source freshness` completes, a JSON file containing information about the freshness of your sources will be saved to `target/sources.json`. An example `sources.json` will look like:
 
@@ -88,4 +88,4 @@ Snapshots of source freshness can be used to understand:
 
 This command can be run manually to determine the state of your source data freshness at any time. It is also recommended that you run this command on a schedule, storing the results of the freshness snapshot at regular intervals. These longitudinal snapshots will make it possible to be alerted when source data freshness SLAs are violated, as well as understand the trend of freshness over time.
 
-dbt Cloud makes it easy to snapshot source freshness on a schedule, and provides a dashboard out of the box indicating the state of freshness for all of the sources defined in your project. For more information on snapshotting freshness in dbt Cloud, check out the [docs](cloud-snapshotting-source-freshness).
+dbt Cloud makes it easy to snapshot source freshness on a schedule, and provides a dashboard out of the box indicating the state of freshness for all of the sources defined in your project. For more information on snapshotting freshness in dbt Cloud, check out the [docs](/docs/build/sources#snapshotting-source-data-freshness).
diff --git a/website/docs/reference/commands/test.md b/website/docs/reference/commands/test.md
index f83b3f6dc30..27d6f62e260 100644
--- a/website/docs/reference/commands/test.md
+++ b/website/docs/reference/commands/test.md
@@ -27,4 +27,4 @@ dbt test --select one_specific_model,test_type:singular
 dbt test --select one_specific_model,test_type:generic
 ```
 
-For more information on writing tests, see the [Testing Documentation](building-a-dbt-project/tests).
+For more information on writing tests, see the [Testing Documentation](/docs/build/tests).
diff --git a/website/docs/reference/configs-and-properties.md b/website/docs/reference/configs-and-properties.md
index 6a9b8842e71..436f06f704d 100644
--- a/website/docs/reference/configs-and-properties.md
+++ b/website/docs/reference/configs-and-properties.md
@@ -15,8 +15,8 @@ A rule of thumb: properties declare things _about_ your project resources; confi
 
 For example, you can use resource **properties** to:
 * Describe models, snapshots, seed files, and their columns
-* Assert "truths" about a model, in the form of [tests](building-a-dbt-project/tests), e.g. "this `id` column is unique"
-* Define pointers to existing tables that contain raw data, in the form of [sources](using-sources), and assert the expected "freshness" of this raw data
+* Assert "truths" about a model, in the form of [tests](/docs/build/tests), e.g. "this `id` column is unique"
+* Define pointers to existing tables that contain raw data, in the form of [sources](/docs/build/sources), and assert the expected "freshness" of this raw data
 * Define official downstream uses of your data models, in the form of [exposures](exposures)
 
 Whereas you can use **configurations** to:
@@ -29,13 +29,15 @@ Whereas you can use **configurations** to:
 
 Depending on the resource type, configurations can be defined:
 
-1. Using a [`config()` Jinja macro](dbt-jinja-functions/config) within a `model`, `snapshot`, or `test` SQL file
+1. Using a [`config()` Jinja macro](/reference/dbt-jinja-functions/config) within a `model`, `snapshot`, or `test` SQL file
 2. Using a [`config` property](resource-properties/config) in a `.yml` file
 3.
From the [`dbt_project.yml` file](dbt_project.yml), under the corresponding resource key (`models:`, `snapshots:`, `tests:`, etc)
 
 ### Config inheritance
 
-Configurations are prioritized in order of specificity, which is generally the order above: an in-file `config()` block takes precedence over properties defied in a `.yml` file, which takes precedence over a config defined in the project file. (Note that generic tests work a little differently when it comes to specificity. See [test configs](test-configs).)
+dbt prioritizes configurations in order of specificity, from most specific to least specific. This generally follows the order above: an in-file `config()` block --> properties defined in a `.yml` file --> config defined in the project file.
+
+Note - Generic tests work a little differently when it comes to specificity. See [test configs](test-configs).
 
 Within the project file, configurations are also applied hierarchically. The most-specific config always "wins": In the project file, configurations applied to a `marketing` subdirectory will take precedence over configurations applied to the entire `jaffle_shop` project. To apply a configuration to a model, or directory of models, define the resource path as nested dictionary keys.
 
@@ -77,7 +79,7 @@ Certain properties are special, because:
 
 These properties are:
 - [`description`](resource-properties/description)
 - [`tests`](resource-properties/tests)
-- [`docs`](resource-properties/docs)
+- [`docs`](/reference/resource-configs/docs)
 - [`columns`](resource-properties/columns)
 - [`quote`](resource-properties/quote)
 - [`source` properties](source-properties) (e.g. `loaded_at_field`, `freshness`)
@@ -163,12 +165,12 @@ You can find an exhaustive list of each supported property and config, broken do
 * Exposure [properties](exposure-properties)
 
 ## FAQs
-
-
-
-
-
-
+
+
+
+
+
+
 
 ## Troubleshooting common errors
diff --git a/website/docs/reference/dbt-classes.md b/website/docs/reference/dbt-classes.md
index 16c06e8b387..20e7637ba7a 100644
--- a/website/docs/reference/dbt-classes.md
+++ b/website/docs/reference/dbt-classes.md
@@ -2,7 +2,7 @@
 title: "dbt Classes"
 ---
 
-dbt has a number of classes it uses to represent objects in a data warehouse, parts of a dbt project, and the results of a command.
+dbt has a number of classes it uses to represent objects in a data warehouse, parts of a dbt project, and the results of a command.
 
 These classes are often useful when building advanced dbt models and macros.
diff --git a/website/docs/reference/dbt-commands.md b/website/docs/reference/dbt-commands.md
index 09a78741f8f..37f3d234837 100644
--- a/website/docs/reference/dbt-commands.md
+++ b/website/docs/reference/dbt-commands.md
@@ -3,7 +3,7 @@ title: "dbt Command reference"
 ---
 
 dbt is typically run one of two ways:
-* In [dbt Cloud](the-dbt-ide)
+* In [dbt Cloud](/docs/get-started/develop-in-the-cloud)
 * On the command line
 
 The following sections outline the commands supported by dbt and their relevant flags. Note that some commands are only supported when using the CLI.
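 
 For instance, a typical CLI invocation pairs a command with its flags. A quick illustrative sketch (the `staging` selector is a placeholder for one of your own folders or models):
 
 ```
 dbt run --select staging --full-refresh
 ```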
diff --git a/website/docs/reference/dbt-jinja-functions/adapter.md b/website/docs/reference/dbt-jinja-functions/adapter.md
index bfde2ba669c..1c11d1c8ff7 100644
--- a/website/docs/reference/dbt-jinja-functions/adapter.md
+++ b/website/docs/reference/dbt-jinja-functions/adapter.md
@@ -12,13 +12,13 @@ The following functions are available:
 - [adapter.dispatch](dispatch)
 - [adapter.get_missing_columns](#get_missing_columns)
 - [adapter.expand_target_column_types](#expand_target_column_types)
-- [adapter.get_relation](#get_relation)
-- [adapter.load_relation](#load_relation)
+- [adapter.get_relation](#get_relation) or [load_relation](#load_relation)
 - [adapter.get_columns_in_relation](#get_columns_in_relation)
 - [adapter.create_schema](#create_schema)
 - [adapter.drop_schema](#drop_schema)
 - [adapter.drop_relation](#drop_relation)
 - [adapter.rename_relation](#rename_relation)
+- [adapter.quote](#quote)
 
 ### Deprecated adapter functions
 
@@ -75,7 +75,7 @@ Expand the `to_relation` 's column types to match the schema
 {% set tmp_relation = adapter.get_relation(...) %}
 {% set target_relation = adapter.get_relation(...) %}
 
-{% do adapter.expand_target_column_types(tmp_realtion, target_relation) %}
+{% do adapter.expand_target_column_types(tmp_relation, target_relation) %}
 ```
 
@@ -88,7 +88,7 @@ __Args__:
 
  * `schema`: The schema of the relation to fetch
  * `identifier`: The identifier of the relation to fetch
 
-Returns a [Relation](dbt-classes#relation) object identified by the `database.schema.identifier` provided to the method, or `None` if the relation does not exist.
+Returns a cached [Relation](dbt-classes#relation) object identified by the `database.schema.identifier` provided to the method, or `None` if the relation does not exist.
 
 **Usage**:
 
@@ -112,7 +112,7 @@ __Args__:
 
 * `relation`: The [Relation](dbt-classes#relation) to try to load
 
-A convenience wrapper for [get_relation](#get_relation). Returns another copy of the same [Relation](dbt-classes#relation) object, or `None` if the relation does not exist.
+A convenience wrapper for [get_relation](#get_relation). Returns the cached version of the [Relation](dbt-classes#relation) object, or `None` if the relation does not exist.
 
 **Usage**:
 
@@ -120,7 +120,7 @@ A convenience wrapper for [get_relation](#get_relation). Returns another copy of
 
 ```sql
 
-{% set relation_exists = (adapter.load_relation(ref('my_model')) is not none %}
+{% set relation_exists = load_relation(ref('my_model')) is not none %}
 {% if relation_exists %}
 {{ log("my_model has already been built", info=true) }}
 {% else %}
@@ -240,6 +240,26 @@ Renames a Relation the database. The `rename_relation` method will rename the s
+
+## quote
+__Args__:
+
+ * `identifier`: A string to quote
+
+Encloses `identifier` in the correct quotes for the adapter when escaping reserved column names etc.
+
+**Usage:**
+
+
+
+```sql
+select
+  'abc' as {{ adapter.quote('table_name') }},
+  'def' as {{ adapter.quote('group by') }}
+```
+
+
+
 ## get_columns_in_table
 
 :::danger Deprecated
diff --git a/website/docs/reference/dbt-jinja-functions/builtins.md b/website/docs/reference/dbt-jinja-functions/builtins.md
index 57d6750cec1..f3ee4313f2c 100644
--- a/website/docs/reference/dbt-jinja-functions/builtins.md
+++ b/website/docs/reference/dbt-jinja-functions/builtins.md
@@ -10,10 +10,11 @@ The `builtins` variable exists to provide references to builtin dbt context meth
 
 The `builtins` variable is a dictionary containing the following keys:
 - [ref](ref)
-- [source](dbt-jinja-functions/source)
-- [config](dbt-jinja-functions/config)
+- [source](/reference/dbt-jinja-functions/source)
+- [config](/reference/dbt-jinja-functions/config)
 
 ## Usage
+
 The following macro overrides the `ref` method available in the model compilation context to return a [Relation](dbt-classes#relation) with the database name overridden to `dev`.
 
 ```
diff --git a/website/docs/reference/dbt-jinja-functions/config.md b/website/docs/reference/dbt-jinja-functions/config.md
index 3bc0d1c7f3f..616d8cd6d9c 100644
--- a/website/docs/reference/dbt-jinja-functions/config.md
+++ b/website/docs/reference/dbt-jinja-functions/config.md
@@ -24,6 +24,10 @@ is responsible for handling model code that looks like this:
 }}
 ```
 
+Review [Model configurations](/reference/model-configs) for examples and more information on valid arguments.
+
+
 ## config.get
 __Args__:
diff --git a/website/docs/reference/dbt-jinja-functions/cross-database-macros.md b/website/docs/reference/dbt-jinja-functions/cross-database-macros.md
new file mode 100644
index 00000000000..a7d8f2bc54b
--- /dev/null
+++ b/website/docs/reference/dbt-jinja-functions/cross-database-macros.md
@@ -0,0 +1,953 @@
+---
+title: "cross-database macros"
+id: "cross-database-macros"
+---
+
+# Cross-database macros
+
+## Overview
+
+These macros benefit three different user groups:
+- If you maintain a package, your package is more likely to work on other adapters by using these macros (rather than a specific database's SQL syntax)
+- If you maintain an adapter, your adapter is more likely to support more packages by implementing (and testing) these macros.
+- If you're an end user, more packages and adapters are likely to "just work" for you (without you having to do anything).
+
+:::note Note
+Please make sure to take a look at the [SQL expressions section](#sql-expressions) to understand quoting syntax for string values and date literals.
+::: + +## All functions (alphabetical) + + + +- [any_value](#any_value) +- [bool_or](#bool_or) +- [cast_bool_to_text](#cast_bool_to_text) +- [concat](#concat) +- [dateadd](#dateadd) +- [datediff](#datediff) +- [date_trunc](#date_trunc) +- [escape_single_quotes](#escape_single_quotes) +- [except](#except) +- [hash](#hash) +- [intersect](#intersect) +- [last_day](#last_day) +- [length](#length) +- [listagg](#listagg) +- [position](#position) +- [replace](#replace) +- [right](#right) +- [safe_cast](#safe_cast) +- [split_part](#split_part) +- [string_literal](#string_literal) +- [type_bigint](#type_bigint) +- [type_float](#type_float) +- [type_int](#type_int) +- [type_numeric](#type_numeric) +- [type_string](#type_string) +- [type_timestamp](#type_timestamp) + + + + +- [any_value](#any_value) +- [array_append](#array_append) +- [array_concat](#array_concat) +- [array_construct](#array_construct) +- [bool_or](#bool_or) +- [cast_bool_to_text](#cast_bool_to_text) +- [concat](#concat) +- [dateadd](#dateadd) +- [datediff](#datediff) +- [date_trunc](#date_trunc) +- [escape_single_quotes](#escape_single_quotes) +- [except](#except) +- [hash](#hash) +- [intersect](#intersect) +- [last_day](#last_day) +- [length](#length) +- [listagg](#listagg) +- [position](#position) +- [replace](#replace) +- [right](#right) +- [safe_cast](#safe_cast) +- [split_part](#split_part) +- [string_literal](#string_literal) +- [type_bigint](#type_bigint) +- [type_boolean](#type_boolean) +- [type_float](#type_float) +- [type_int](#type_int) +- [type_numeric](#type_numeric) +- [type_string](#type_string) +- [type_timestamp](#type_timestamp) + + + + + +[**Data type functions**](#data-type-functions) +- [type_bigint](#type_bigint) +- [type_float](#type_float) +- [type_int](#type_int) +- [type_numeric](#type_numeric) +- [type_string](#type_string) +- [type_timestamp](#type_timestamp) + + + + +[**Data type functions**](#data-type-functions) +- [type_bigint](#type_bigint) +- [type_boolean](#type_boolean) +- [type_float](#type_float) +- [type_int](#type_int) +- [type_numeric](#type_numeric) +- [type_string](#type_string) +- [type_timestamp](#type_timestamp) + + + +[**Set functions**](#set-functions) +- [except](#except) +- [intersect](#intersect) + + + +[**Array functions**](#array-functions) +- [array_append](#array_append) +- [array_concat](#array_concat) +- [array_construct](#array_construct) + + + +[**String functions**](#string-functions) +- [concat](#concat) +- [hash](#hash) +- [length](#length) +- [position](#position) +- [replace](#replace) +- [right](#right) +- [split_part](#split_part) + +[**String literal functions**](#string-literal-functions) +- [escape_single_quotes](#escape_single_quotes) +- [string_literal](#string_literal) + +[**Aggregate and window functions**](#aggregate-and-window-functions) +- [any_value](#any_value) +- [bool_or](#bool_or) +- [listagg](#listagg) + +[**Cast functions**](#cast-functions) +- [cast_bool_to_text](#cast_bool_to_text) +- [safe_cast](#safe_cast) + +[**Date and time functions**](#date-and-time-functions) +- [dateadd](#dateadd) +- [datediff](#datediff) +- [date_trunc](#date_trunc) +- [last_day](#last_day) + +## Data type functions + +### type_bigint +__Args__: + + * None + +This macro yields the database-specific data type for a `BIGINT`. + +**Usage**: + +```sql +{{ dbt.type_bigint() }} +``` + +**Sample Output (PostgreSQL)**: + +```sql +bigint +``` + + + +### type_boolean +__Args__: + + * None + +This macro yields the database-specific data type for a `BOOLEAN`. 
+
+**Usage**:
+
+```sql
+{{ dbt.type_boolean() }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+BOOLEAN
+```
+
+
+
+### type_float
+__Args__:
+
+ * None
+
+This macro yields the database-specific data type for a `FLOAT`.
+
+**Usage**:
+
+```sql
+{{ dbt.type_float() }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+FLOAT
+```
+
+### type_int
+__Args__:
+
+ * None
+
+This macro yields the database-specific data type for an `INT`.
+
+**Usage**:
+
+```sql
+{{ dbt.type_int() }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+INT
+```
+
+### type_numeric
+
+__Args__:
+
+ * None
+
+This macro yields the database-specific data type for a `NUMERIC`.
+
+**Usage**:
+
+```sql
+{{ dbt.type_numeric() }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+numeric(28,6)
+```
+
+### type_string
+__Args__:
+
+ * None
+
+This macro yields the database-specific data type for `TEXT`.
+
+**Usage**:
+
+```sql
+{{ dbt.type_string() }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+TEXT
+```
+
+### type_timestamp
+__Args__:
+
+ * None
+
+This macro yields the database-specific data type for a `TIMESTAMP` (which may or may not match the behavior of `TIMESTAMP WITHOUT TIMEZONE` from ANSI SQL-92).
+
+**Usage**:
+
+```sql
+{{ dbt.type_timestamp() }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+TIMESTAMP
+```
+
+## Set functions
+
+### except
+__Args__:
+
+ * None
+
+`except` is one of the set operators specified in ANSI SQL-92 (along with `union` and `intersect`) and is akin to [set difference](https://en.wikipedia.org/wiki/Complement_(set_theory)#Relative_complement).
+
+**Usage**:
+
+```sql
+{{ dbt.except() }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+except
+```
+
+### intersect
+__Args__:
+
+ * None
+
+`intersect` is one of the set operators specified in ANSI SQL-92 (along with `union` and `except`) and is akin to [set intersection](https://en.wikipedia.org/wiki/Intersection_(set_theory)).
+
+**Usage**:
+
+```sql
+{{ dbt.intersect() }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+intersect
+```
+
+
+
+## Array functions
+
+### array_append
+__Args__:
+
+ * `array` (required): The array to append to.
+ * `new_element` (required): The element to be appended. This element must *match the data type of the existing elements* in the array in order to match PostgreSQL functionality and *not null* to match BigQuery functionality.
+
+This macro appends an element to the end of an array and returns the appended array.
+
+**Usage**:
+
+```sql
+{{ dbt.array_append("array_column", "element_column") }}
+{{ dbt.array_append("array_column", "5") }}
+{{ dbt.array_append("array_column", "'blue'") }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+array_append(array_column, element_column)
+array_append(array_column, 5)
+array_append(array_column, 'blue')
+```
+
+### array_concat
+__Args__:
+
+ * `array_1` (required): The array to append to.
+ * `array_2` (required): The array to be appended to `array_1`. This array must match the data type of `array_1` in order to match PostgreSQL functionality.
+
+This macro returns the concatenation of two arrays.
+
+**Usage**:
+
+```sql
+{{ dbt.array_concat("array_column_1", "array_column_2") }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+array_cat(array_column_1, array_column_2)
+```
+
+### array_construct
+__Args__:
+
+ * `inputs` (optional): The list of array contents. If not provided, this macro will create an empty array. All inputs must be the *same data type* in order to match PostgreSQL functionality and *not null* to match BigQuery functionality.
* `data_type` (optional): Specifies the data type of the constructed array. This is only relevant when creating an empty array (will otherwise use the data type of the inputs). If `inputs` and `data_type` are both not provided, this macro will create an empty array of type integer.
+
+This macro returns an array constructed from a set of inputs.
+
+**Usage**:
+
+```sql
+{{ dbt.array_construct(["column_1", "column_2", "column_3"]) }}
+{{ dbt.array_construct([], "integer") }}
+{{ dbt.array_construct([1, 2, 3, 4]) }}
+{{ dbt.array_construct(["'blue'", "'green'"]) }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+array[ column_1 , column_2 , column_3 ]
+array[]::integer[]
+array[ 1 , 2 , 3 , 4 ]
+array[ 'blue' , 'green' ]
+```
+
+
+
+## String functions
+
+### concat
+__Args__:
+
+ * `fields`: Jinja array of [attribute names or expressions](#sql-expressions).
+
+This macro combines a list of strings together.
+
+**Usage**:
+
+```sql
+{{ dbt.concat(["column_1", "column_2"]) }}
+{{ dbt.concat(["year_column", "'-'" , "month_column", "'-'" , "day_column"]) }}
+{{ dbt.concat(["first_part_column", "'.'" , "second_part_column"]) }}
+{{ dbt.concat(["first_part_column", "','" , "second_part_column"]) }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+column_1 || column_2
+year_column || '-' || month_column || '-' || day_column
+first_part_column || '.' || second_part_column
+first_part_column || ',' || second_part_column
+```
+
+### hash
+__Args__:
+
+ * `field`: [attribute name or expression](#sql-expressions).
+
+This macro provides a hash (such as [MD5](https://en.wikipedia.org/wiki/MD5)) of an [expression](#sql-expressions) cast as a string.
+
+**Usage**:
+
+```sql
+{{ dbt.hash("column") }}
+{{ dbt.hash("'Pennsylvania'") }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+md5(cast(column as 
+    varchar
+))
+md5(cast('Pennsylvania' as 
+    varchar
+))
+```
+
+### length
+__Args__:
+
+ * `expression`: string [expression](#sql-expressions).
+
+This macro calculates the number of characters in a string.
+
+**Usage**:
+
+```sql
+{{ dbt.length("column") }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+    length(
+        column
+    )
+```
+
+### position
+__Args__:
+
+ * `substring_text`: [attribute name or expression](#sql-expressions).
+ * `string_text`: [attribute name or expression](#sql-expressions).
+
+This macro searches for the first occurrence of `substring_text` within `string_text` and returns the 1-based position if found.
+
+**Usage**:
+
+```sql
+{{ dbt.position("substring_column", "text_column") }}
+{{ dbt.position("'-'", "text_column") }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+    position(
+        substring_column in text_column
+    )
+
+    position(
+        '-' in text_column
+    )
+```
+
+### replace
+__Args__:
+
+ * `field`: [attribute name or expression](#sql-expressions).
+ * `old_chars`: [attribute name or expression](#sql-expressions).
+ * `new_chars`: [attribute name or expression](#sql-expressions).
+
+This macro updates a string and replaces all occurrences of one substring with another. The precise behavior may vary slightly from one adapter to another.
+
+**Usage**:
+
+```sql
+{{ dbt.replace("string_text_column", "old_chars_column", "new_chars_column") }}
+{{ dbt.replace("string_text_column", "'-'", "'_'") }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+    replace(
+        string_text_column,
+        old_chars_column,
+        new_chars_column
+    )
+
+    replace(
+        string_text_column,
+        '-',
+        '_'
+    )
+```
+
+### right
+__Args__:
+
+ * `string_text`: [attribute name or expression](#sql-expressions).
+ * `length_expression`: numeric [expression](#sql-expressions). + +This macro returns the N rightmost characters from a string. + +**Usage**: + +```sql +{{ dbt.right("string_text_column", "length_column") }} +{{ dbt.right("string_text_column", "3") }} +``` + +**Sample Output (PostgreSQL)**: + +```sql + right( + string_text_column, + length_column + ) + + right( + string_text_column, + 3 + ) +``` + +### split_part +__Args__: + +* `string_text` (required): Text to be split into parts. +* `delimiter_text` (required): Text representing the delimiter to split by. +* `part_number` (required): Requested part of the split (1-based). If the value is negative, the parts are counted backward from the end of the string. + +This macro splits a string of text using the supplied delimiter and returns the supplied part number (1-indexed). + +**Usage**: + +When referencing a column, use one pair of quotes. When referencing a string, use single quotes enclosed in double quotes. + +```sql +{{ dbt.split_part(string_text='column_to_split', delimiter_text='delimiter_column', part_number=1) }} +{{ dbt.split_part(string_text="'1|2|3'", delimiter_text="'|'", part_number=1) }} +``` + +**Sample Output (PostgreSQL)**: + +```sql + split_part( + column_to_split, + delimiter_column, + 1 + ) + + split_part( + '1|2|3', + '|', + 1 + ) +``` + +## String literal functions + +### escape_single_quotes +__Args__: + + * `value`: Jinja string literal value + +This macro adds escape characters for any single quotes within the provided string literal. Note: if given a column, it will only operate on the column _name_, not the values within the column. + +To escape quotes for column values, consider a macro like [replace](#replace) or a regular expression replace. + +**Usage**: + +```sql +{{ dbt.escape_single_quotes("they're") }} +{{ dbt.escape_single_quotes("ain't ain't a word") }} +``` + +**Sample Output (PostgreSQL)**: + +```sql +they''re +ain''t ain''t a word +``` + +### string_literal +__Args__: + + * `value`: Jinja string value + +This macro converts a Jinja string into a SQL string literal. + +To cast column values to a string, consider a macro like [safe_cast](#safe_cast) or an ordinary cast. + +**Usage**: + +```sql +select {{ dbt.string_literal("Pennsylvania") }} +``` + +**Sample Output (PostgreSQL)**: + +```sql +select 'Pennsylvania' +``` + +## Aggregate and window functions + +### any_value +__Args__: + + * `expression`: an [expression](#sql-expressions). + +This macro returns some value of the expression from the group. The selected value is non-deterministic (rather than random). + +**Usage**: + +```sql +{{ dbt.any_value("column_name") }} +``` + +**Sample Output (PostgreSQL)**: + +```sql +any(column_name) +``` + +### bool_or +__Args__: + + * `expression`: [attribute name or expression](#sql-expressions). + +This macro returns the logical `OR` of all non-`NULL` expressions -- `true` if at least one record in the group evaluates to `true`. + +**Usage**: + +```sql +{{ dbt.bool_or("boolean_column") }} +{{ dbt.bool_or("integer_column = 3") }} +{{ dbt.bool_or("string_column = 'Pennsylvania'") }} +{{ dbt.bool_or("column1 = column2") }} +``` + +**Sample Output (PostgreSQL)**: + +```sql +bool_or(boolean_column) +bool_or(integer_column = 3) +bool_or(string_column = 'Pennsylvania') +bool_or(column1 = column2) +``` + +### listagg +__Args__: + + * `measure` (required): The [attribute name or expression](#sql-expressions) that determines the values to be concatenated. 
To only include distinct values, add the keyword `DISTINCT` to the beginning of the expression (example: 'DISTINCT column_to_agg').
+ * `delimiter_text` (required): Text representing the delimiter to separate concatenated values by.
+ * `order_by_clause` (optional): An expression (typically one or more column names separated by commas) that determines the order of the concatenated values.
+ * `limit_num` (optional): Specifies the maximum number of values to be concatenated.
+
+This macro returns the concatenated input values from a group of rows separated by a specified delimiter.
+
+**Usage**:
+
+Note: If there are instances of `delimiter_text` within your `measure`, you cannot include a `limit_num`.
+
+```sql
+{{ dbt.listagg(measure="column_to_agg", delimiter_text="','", order_by_clause="order by order_by_column", limit_num=10) }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+array_to_string(
+        (array_agg(
+            column_to_agg
+            order by order_by_column
+        ))[1:10],
+        ','
+        )
+```
+
+## Cast functions
+
+### cast_bool_to_text
+__Args__:
+
+ * `field`: boolean [attribute name or expression](#sql-expressions).
+
+This macro casts a boolean value to a string.
+
+**Usage**:
+
+```sql
+{{ dbt.cast_bool_to_text("boolean_column_name") }}
+{{ dbt.cast_bool_to_text("false") }}
+{{ dbt.cast_bool_to_text("true") }}
+{{ dbt.cast_bool_to_text("0 = 1") }}
+{{ dbt.cast_bool_to_text("1 = 1") }}
+{{ dbt.cast_bool_to_text("null") }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+    cast(boolean_column_name as 
+    varchar
+)
+
+    cast(false as 
+    varchar
+)
+
+    cast(true as 
+    varchar
+)
+
+    cast(0 = 1 as 
+    varchar
+)
+
+    cast(1 = 1 as 
+    varchar
+)
+
+    cast(null as 
+    varchar
+)
+```
+
+### safe_cast
+__Args__:
+
+ * `field`: [attribute name or expression](#sql-expressions).
+ * `type`: data type to convert to
+
+For databases that support it, this macro will return `NULL` when the cast fails (instead of raising an error).
+
+**Usage**:
+
+```sql
+{{ dbt.safe_cast("column_1", api.Column.translate_type("string")) }}
+{{ dbt.safe_cast("column_2", api.Column.translate_type("integer")) }}
+{{ dbt.safe_cast("'2016-03-09'", api.Column.translate_type("date")) }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+    cast(column_1 as TEXT)
+    cast(column_2 as INT)
+    cast('2016-03-09' as date)
+```
+
+## Date and time functions
+
+### dateadd
+__Args__:
+
+ * `datepart`: [date or time part](#date-and-time-parts).
+ * `interval`: integer count of the `datepart` to add (can be positive or negative)
+ * `from_date_or_timestamp`: date/time [expression](#sql-expressions).
+
+This macro adds a time/day interval to the supplied date/timestamp. Note: The `datepart` argument is database-specific.
+
+**Usage**:
+
+```sql
+{{ dbt.dateadd(datepart="day", interval=1, from_date_or_timestamp="'2016-03-09'") }}
+{{ dbt.dateadd(datepart="month", interval=-2, from_date_or_timestamp="'2016-03-09'") }}
+```
+
+**Sample Output (PostgreSQL)**:
+
+```sql
+    '2016-03-09' + ((interval '1 day') * (1))
+    '2016-03-09' + ((interval '1 month') * (-2))
+```
+
+### datediff
+__Args__:
+
+ * `first_date`: date/time [expression](#sql-expressions).
+ * `second_date`: date/time [expression](#sql-expressions).
+ * `datepart`: [date or time part](#date-and-time-parts).
+
+This macro calculates the difference between two dates.
+ +**Usage**: + +```sql +{{ dbt.datediff("column_1", "column_2", "day") }} +{{ dbt.datediff("column", "'2016-03-09'", "month") }} +{{ dbt.datediff("'2016-03-09'", "column", "year") }} +``` + +**Sample Output (PostgreSQL)**: + +```sql + ((column_2)::date - (column_1)::date) + + ((date_part('year', ('2016-03-09')::date) - date_part('year', (column)::date)) + * 12 + date_part('month', ('2016-03-09')::date) - date_part('month', (column)::date)) + + (date_part('year', (column)::date) - date_part('year', ('2016-03-09')::date)) +``` + +### date_trunc +__Args__: + + * `datepart`: [date or time part](#date-and-time-parts). + * `date`: date/time [expression](#sql-expressions). + +This macro truncates / rounds a timestamp to the first instant for the given [date or time part](#date-and-time-parts). + +**Usage**: + +```sql +{{ dbt.date_trunc("day", "updated_at") }} +{{ dbt.date_trunc("month", "updated_at") }} +{{ dbt.date_trunc("year", "'2016-03-09'") }} +``` + +**Sample Output (PostgreSQL)**: + +```sql +date_trunc('day', updated_at) +date_trunc('month', updated_at) +date_trunc('year', '2016-03-09') +``` + +### last_day +__Args__: + + * `date`: date/time [expression](#sql-expressions). + * `datepart`: [date or time part](#date-and-time-parts). + +This macro gets the last day for a given date and datepart. + +**Usage**: +- The `datepart` argument is database-specific. +- This macro currently only supports dateparts of `month` and `quarter`. + +```sql +{{ dbt.last_day("created_at", "month") }} +{{ dbt.last_day("'2016-03-09'", "year") }} +``` + +**Sample Output (PostgreSQL)**: + +```sql +cast( + date_trunc('month', created_at) + ((interval '10 month') * (1)) + + ((interval '10 day') * (-1)) + as date) + +cast( + date_trunc('year', '2016-03-09') + ((interval '10 year') * (1)) + + ((interval '10 day') * (-1)) + as date) +``` + +## Date and time parts + +Often supported date and time parts (case insensitive): +* `year` +* `quarter` +* `month` +* `week` +* `day` +* `hour` +* `minute` +* `second` +* `millisecond` +* `microsecond` +* `nanosecond` + +This listing is not meant to be exhaustive, and some of these date and time parts may not be supported for particular adapters. +Some macros may not support all date and time parts. Some adapters may support more or less precision. + +## SQL expressions + +A SQL expression may take forms like the following: +- function +- column name +- date literal +- string literal +- <other data type> literal (number, etc) +- `NULL` + +Example: +Suppose there is an `orders` table with a column named `order_date`. The following shows 3 different types of expressions: +```sql +select + date_trunc(month, order_date) as expression_function, + order_date as expression_column_name, + '2016-03-09' as expression_date_literal, + 'Pennsylvania' as expression_string_literal, + 3 as expression_number_literal, + NULL as expression_null, +from orders +``` + +Note that the string literal example includes single quotes. (Note: the string literal character may vary per database. For this example, we suppose a single quote.) To refer to a SQL string literal in Jinja, surrounding double quotes are required. 
+ +So within Jinja, the string values would be: +- `"date_trunc(month, order_date)"` +- `"order_date"` +- `"'2016-03-09'"` +- `"'Pennsylvania'"` +- `"NULL"` diff --git a/website/docs/reference/dbt-jinja-functions/dbt-project-yml-context.md b/website/docs/reference/dbt-jinja-functions/dbt-project-yml-context.md index 2050d137ce8..71d0ce67628 100644 --- a/website/docs/reference/dbt-jinja-functions/dbt-project-yml-context.md +++ b/website/docs/reference/dbt-jinja-functions/dbt-project-yml-context.md @@ -17,7 +17,7 @@ and `snapshots:` keys in the `dbt_project.yml` file. **Available context variables:** - [target](target) - [env_var](env_var) -- [vars](var) (_Note: only variables defined with `--vars` are availabe_) +- [vars](var) (_Note: only variables defined with `--vars` are available_) - [builtins](builtins) - [dbt_version](dbt_version) diff --git a/website/docs/reference/dbt-jinja-functions/dispatch.md b/website/docs/reference/dbt-jinja-functions/dispatch.md index 75378881e8a..46f17029fef 100644 --- a/website/docs/reference/dbt-jinja-functions/dispatch.md +++ b/website/docs/reference/dbt-jinja-functions/dispatch.md @@ -9,10 +9,10 @@ title: "dispatch" - **v0.20.0:** Parent adapters' macro implementations are included in search order. Formalized supported arguments. - **v0.21.0:** All dispatched macros in the dbt global project include `dbt` namespace - **v1.0.0:** The 'packages' argument is fully deprecated. Use `macro_namespace` and project-level `dispatch` config instead. - + -dbt can extend functionality across [its many supported adapters](available-adapters) through a system of [multiple dispatch](https://en.wikipedia.org/wiki/Multiple_dispatch). Because SQL syntax, data types, and DDL / DML support vary across adapters, dbt can define and call generic functional macros, and then "dispatch" that macro to the appropriate implementation for the current adapter. +dbt can extend functionality across [Supported Data Platforms](supported-data-platforms) through a system of [multiple dispatch](https://en.wikipedia.org/wiki/Multiple_dispatch). Because SQL syntax, data types, and DDL / DML support vary across adapters, dbt can define and call generic functional macros, and then "dispatch" that macro to the appropriate implementation for the current adapter. ## Syntax @@ -83,7 +83,7 @@ Below that macro, I've defined three possible implementations of the `concat` ma ### A more complex example -I found an existing implementation of the `concat` macro in the dbt-utils package. However, I want to override its implementation of the `concat` macro on Redshift in particular. In all other cases—including the default implementation—I'm perfectly happy falling back to the implementations defined in [`dbt_utils.concat`](https://github.com/dbt-labs/dbt-utils/blob/master/macros/cross_db_utils/concat.sql). +I found an existing implementation of the `concat` macro in the dbt-utils package. However, I want to override its implementation of the `concat` macro on Redshift in particular. In all other cases—including the default implementation—I'm perfectly happy falling back to the implementations defined in `dbt_utils.concat`. @@ -109,7 +109,7 @@ If I'm running on Redshift, dbt will use my version; if I'm running on any other ## For package maintainers -Dispatched macros from [packages](package-management) _must_ provide the `macro_namespace` argument, as this declares the namespace (package) where it plans to search for candidates. Most often, this is the same as the name of your package, e.g. `dbt_utils`.
(It is possible, if rarely desirable, to define a dispatched macro _not_ in the `dbt_utils` package, and dispatch it into the `dbt_utils` namespace.) +Dispatched macros from [packages](/docs/build/packages) _must_ provide the `macro_namespace` argument, as this declares the namespace (package) where it plans to search for candidates. Most often, this is the same as the name of your package, e.g. `dbt_utils`. (It is possible, if rarely desirable, to define a dispatched macro _not_ in the `dbt_utils` package, and dispatch it into the `dbt_utils` namespace.) Here we have the definition of the `dbt_utils.concat` macro, which specifies both the `macro_name` and `macro_namespace` to dispatch: @@ -154,7 +154,7 @@ As a package maintainer, this functionality enables users of my package to exten I maintain an internal utility package at my organization, named `my_org_dbt_helpers`. I use this package to reimplement built-in dbt macros on behalf of all my dbt-using colleagues, who work across a number of dbt projects. -My package can define custom versions of any dispatched global macro I choose, from `generate_schema_name` to `test_unique`. I can define a new default version of that macro (e.g. `default__generate_schema_name`), or custom versions for specific data warehouse adapters (e.g. `spark__generate_schema_name`). +My package can define custom versions of any dispatched global macro I choose, from `generate_schema_name` to `test_unique`. I can define a new default version of that macro (e.g. `default__generate_schema_name`), or custom versions for specific adapters (e.g. `spark__generate_schema_name`). Each root project installing my package simply needs to include the [project-level `dispatch` config](project-configs/dispatch-config) that searches my package ahead of `dbt` for the `dbt` global namespace: @@ -249,4 +249,4 @@ In rare cases, the child adapter may prefer the default implementation to its pa ## FAQs - + diff --git a/website/docs/reference/dbt-jinja-functions/env_var.md b/website/docs/reference/dbt-jinja-functions/env_var.md index b72936d115a..b956ece728a 100644 --- a/website/docs/reference/dbt-jinja-functions/env_var.md +++ b/website/docs/reference/dbt-jinja-functions/env_var.md @@ -26,6 +26,12 @@ profile: If the `DBT_USER` and `DBT_PASSWORD` environment variables are present when dbt is invoked, then these variables will be pulled into the profile as expected. If any environment variables are not set, then dbt will raise a compilation error. +:::info Integer Environment Variables +If passing an environment variable for a property that uses an integer type (for example, `port`, `threads`), be sure to add a filter to the Jinja expression, as shown here. Otherwise, dbt will raise an `['threads']: '1' is not of type 'integer'` error. +`{{ env_var('DBT_THREADS') | int }}` or `{{ env_var('DB_PORT') | as_number }}` + +::: + :::caution Quoting, Curly Brackets, & You Be sure to quote the entire jinja string (as shown above), or else the yaml parser will be confused by the Jinja curly brackets. @@ -59,10 +65,12 @@ models: For certain configurations, you can use "secret" env vars. 
Any env var named with the prefix `DBT_ENV_SECRET_` will be: - Available for use in `profiles.yml` + `packages.yml`, via the same `env_var()` function -- Disallowed everywhere else, including `dbt_project.yml` and model SQL, to prevent accidentally writing these secret values to the data warehouse or metadata artifacts - Scrubbed from dbt logs and replaced with `*****`, any time its value appears in those logs (even if the env var was not called directly) -The primary use case of secret env vars is git access tokens for [private packages](package-management#private-packages). +The primary use case of secret env vars is git access tokens for [private packages](/docs/build/packages#private-packages). + +**Note:** When dbt is loading profile credentials and package configuration, secret env vars will be replaced with the string value of the environment variable. You cannot modify secrets using Jinja filters, including type-casting filters such as [`as_number`](as_number) or [`as_bool`](as_bool), or pass them as arguments into other Jinja macros. ### Custom metadata @@ -74,6 +82,26 @@ The primary use case of secret env vars is git access tokens for [private packag Any env var named with the prefix `DBT_ENV_CUSTOM_ENV_` will be included in [dbt artifacts](dbt-artifacts#common-metadata), in a `metadata.env` dictionary, with its prefix-stripped name as its key. + + +A dictionary of these prefixed env vars will also be available in a `dbt_metadata_envs` context variable: +```sql +-- {{ dbt_metadata_envs }} + +select 1 as id +``` +```shell +$ DBT_ENV_CUSTOM_ENV_MY_FAVORITE_COLOR=indigo DBT_ENV_CUSTOM_ENV_MY_FAVORITE_NUMBER=6 dbt compile +``` +Compiles to: +```sql +-- {'MY_FAVORITE_COLOR': 'indigo', 'DBT_ENV_CUSTOM_ENV_MY_FAVORITE_NUMBER': '6'} + +select 1 as id +``` + + + :::info dbt Cloud Usage If you are using dbt Cloud, you must adhere to the naming conventions for environment variables. Environment variables in dbt Cloud must be prefixed with `DBT_` (including `DBT_ENV_CUSTOM_ENV_` or `DBT_ENV_SECRET_`). Environment variable keys are uppercased and case sensitive. When referencing `{{env_var('DBT_KEY')}}` in your project's code, the key must match exactly the variable defined in dbt Cloud's UI. ::: diff --git a/website/docs/reference/dbt-jinja-functions/flags.md b/website/docs/reference/dbt-jinja-functions/flags.md index 7e99a83a59b..ba393378269 100644 --- a/website/docs/reference/dbt-jinja-functions/flags.md +++ b/website/docs/reference/dbt-jinja-functions/flags.md @@ -26,3 +26,25 @@ Recommended use cases include: - running hooks conditionally based on the current command / task type, via `flags.WHICH` **Note:** It is _not_ recommended to use flags as an input to parse-time configurations, properties, or dependencies (`ref` + `source`). Flags are likely to change in every invocation of dbt, and their parsed values will become stale (and yield incorrect results) in subsequent invocations that have partial parsing enabled. For more details, see [the docs on parsing](parsing). + + + +### invocation_args_dict + +For the full set of information passed from the CLI—subcommand, flags, arguments—you can use `invocation_args_dict`. This is equivalent to the `args` dictionary in [`run_results.json`](run-results-json).
+ +```sql +-- models/my_model.sql +-- {{ invocation_args_dict }} +-- {{ dbt_metadata_envs }} + +select 1 as id +``` +Compiles to: +```sql +-- {'write_json': True, 'use_colors': True, 'printer_width': 80, 'version_check': True, 'partial_parse': True, 'static_parser': True, 'profiles_dir': '/Users/.../.dbt', 'send_anonymous_usage_stats': False, 'event_buffer_size': 100000, 'quiet': False, 'no_print': False, 'parse_only': False, 'which': 'compile', 'rpc_method': 'compile', 'indirect_selection': 'eager'} + +select 1 as id +``` + + diff --git a/website/docs/reference/dbt-jinja-functions/modules.md b/website/docs/reference/dbt-jinja-functions/modules.md index 6a188ff3008..baa8da80f13 100644 --- a/website/docs/reference/dbt-jinja-functions/modules.md +++ b/website/docs/reference/dbt-jinja-functions/modules.md @@ -48,3 +48,46 @@ This variable is a pointer to the Python [re](https://docs.python.org/3/library/ ) -%} {% endif %} ``` + + + +## itertools +This variable is a pointer to the Python [itertools](https://docs.python.org/3/library/itertools.html) module, which includes useful functions for working with iterators (loops, lists, and the like). + +The supported functions are: +- `count` +- `cycle` +- `repeat` +- `accumulate` +- `chain` +- `compress` +- `islice` +- `starmap` +- `tee` +- `zip_longest` +- `product` +- `permutations` +- `combinations` +- `combinations_with_replacement` + +**Usage** + +``` +{%- set A = [1, 2] -%} +{%- set B = ['x', 'y', 'z'] -%} +{%- set AB_cartesian = modules.itertools.product(A, B) -%} + +{%- for item in AB_cartesian %} + {{ item }} +{%- endfor -%} +``` +``` + (1, 'x') + (1, 'y') + (1, 'z') + (2, 'x') + (2, 'y') + (2, 'z') +``` + + diff --git a/website/docs/reference/dbt-jinja-functions/print.md b/website/docs/reference/dbt-jinja-functions/print.md index 57cc7436e71..25ced86da62 100644 --- a/website/docs/reference/dbt-jinja-functions/print.md +++ b/website/docs/reference/dbt-jinja-functions/print.md @@ -9,11 +9,6 @@ Use the `print()` function when you want to print messages to both the log file When used in conjunction with the `QUIET` global config, which suppresses non-error logs, you will only see error logs and the print messages in stdout. For more information, see [Global configs](/reference/global-configs). -## Arguments - - * `msg`: The message to print - * `info`: If False, only write to the log file. If True, write to both the log file and stdout (default=False) - ## Example ```sql diff --git a/website/docs/reference/dbt-jinja-functions/ref.md b/website/docs/reference/dbt-jinja-functions/ref.md index d540fc37742..9be20d0a226 100644 --- a/website/docs/reference/dbt-jinja-functions/ref.md +++ b/website/docs/reference/dbt-jinja-functions/ref.md @@ -45,6 +45,8 @@ There is also a two-argument variant of the `ref` function. With this variant, y select * from {{ ref('package_name', 'model_name') }} ``` +**Note:** The `package_name` should only include the name of the package, not the maintainer. For example, if you use the [`fivetran/stripe`](https://hub.getdbt.com/fivetran/stripe/latest/) package, type `stripe` in that argument, and not `fivetran/stripe`. + ### Forcing Dependencies In normal usage, dbt knows the proper order to run all models based on the usage of the `ref` function. There are cases though where dbt doesn't know when a model should be run. An example of this is when a model only references a macro. In that case, dbt thinks the model can run first because no explicit references are made at compilation time. 
To address this, you can use a SQL comment along with the `ref` function — dbt will understand the dependency, and the compiled query will still be valid: diff --git a/website/docs/reference/dbt-jinja-functions/run_query.md b/website/docs/reference/dbt-jinja-functions/run_query.md index c87ec115efd..ad2de09b080 100644 --- a/website/docs/reference/dbt-jinja-functions/run_query.md +++ b/website/docs/reference/dbt-jinja-functions/run_query.md @@ -12,14 +12,10 @@ Returns a [Table](https://agate.readthedocs.io/page/api/table.html) object with **Note:** The `run_query` macro will not begin a transaction automatically - if you wish to run your query inside of a transaction, please use `begin` and `commit ` statements as appropriate. - :::info Using run_query for the first time? - -Check out the tutorial on [using Jinja](using-jinja#dynamically-retrieve-the-list-of-payment-methods) for an example of working with the results of the `run_query` macro! - +Check out the section of the Getting Started guide on [using Jinja](/docs/get-started/learning-more/using-jinja#dynamically-retrieve-the-list-of-payment-methods) for an example of working with the results of the `run_query` macro! ::: - **Example Usage:** diff --git a/website/docs/reference/dbt-jinja-functions/schemas.md b/website/docs/reference/dbt-jinja-functions/schemas.md index 35b45dfa818..4047d0ef856 100644 --- a/website/docs/reference/dbt-jinja-functions/schemas.md +++ b/website/docs/reference/dbt-jinja-functions/schemas.md @@ -5,7 +5,7 @@ id: "schemas" `schemas` is a variable available in an `on-run-end` hook, representing a list of schemas that dbt built objects in on this run. -If you do not use [custom schemas](using-custom-schemas), `schemas` will evaluate to your target schema, e.g. `['dbt_alice']`. If you use custom schemas, it will include these as well, e.g. `['dbt_alice', 'dbt_alice_marketing', 'dbt_alice_finance']`. +If you do not use [custom schemas](/docs/build/custom-schemas), `schemas` will evaluate to your target schema, e.g. `['dbt_alice']`. If you use custom schemas, it will include these as well, e.g. `['dbt_alice', 'dbt_alice_marketing', 'dbt_alice_finance']`. The `schemas` variable is useful for granting privileges to all schemas that dbt builds relations in, like so (note this is Redshift specific syntax): diff --git a/website/docs/reference/dbt-jinja-functions/set.md b/website/docs/reference/dbt-jinja-functions/set.md new file mode 100644 index 00000000000..d78f5777828 --- /dev/null +++ b/website/docs/reference/dbt-jinja-functions/set.md @@ -0,0 +1,50 @@ +--- +title: "set" +id: "set" +--- + +### set + +_Not to be confused with the `{% set foo = "bar" ... %}` expression in Jinja!_ + +The `set` context method can be used to convert any iterable to a sequence of iterable elements that are unique (a set). + +__Args__: +- `value`: The iterable to convert (e.g. a list) +- `default`: A default value to return if the `value` argument is not a valid iterable + +### Usage + +``` +{% set my_list = [1, 2, 2, 3] %} +{% set my_set = set(my_list) %} +{% do log(my_set) %} {# {1, 2, 3} #} +``` + +``` +{% set my_invalid_iterable = 1234 %} +{% set my_set = set(my_invalid_iterable) %} +{% do log(my_set) %} {# None #} +``` + +### set_strict + +The `set_strict` context method can be used to convert any iterable to a sequence of iterable elements that are unique (a set). 
The difference from the `set` context method is that the `set_strict` method will raise a `TypeError` if the provided value is not a valid iterable and cannot be converted to a set. + +__Args__: +- `value`: The iterable to convert (e.g. a list) + +``` +{% set my_list = [1, 2, 2, 3] %} +{% set my_set = set_strict(my_list) %} +{% do log(my_set) %} {# {1, 2, 3} #} +``` + +``` +{% set my_invalid_iterable = 1234 %} +{% set my_set = set_strict(my_invalid_iterable) %} +{% do log(my_set) %} + +Compilation Error in ... (...) + 'int' object is not iterable +``` diff --git a/website/docs/reference/dbt-jinja-functions/source.md b/website/docs/reference/dbt-jinja-functions/source.md index 38abc670913..a49a3b56049 100644 --- a/website/docs/reference/dbt-jinja-functions/source.md +++ b/website/docs/reference/dbt-jinja-functions/source.md @@ -9,12 +9,12 @@ select * from {{ source(source_name, table_name) }} ## Definition This function: -- Returns a [Relation](dbt-classes#relation) for a [source](using-sources) +- Returns a [Relation](dbt-classes#relation) for a [source](/docs/build/sources) - Creates dependencies between a source and the current model, which is useful for documentation and model selection - Compiles to the full object name in the database ## Related guides -- [Using sources](using-sources) +- [Using sources](/docs/build/sources) ## Arguments * `source_name`: The `name:` defined under a `sources:` key diff --git a/website/docs/reference/dbt-jinja-functions/target.md b/website/docs/reference/dbt-jinja-functions/target.md index 8c3ce3422f8..a001288c2fb 100644 --- a/website/docs/reference/dbt-jinja-functions/target.md +++ b/website/docs/reference/dbt-jinja-functions/target.md @@ -7,8 +7,8 @@ id: "target" * **dbt CLI:** These values are based on the target defined in your [`profiles.yml` file](reference/profiles.yml.md) * **dbt Cloud Scheduler:** - * `target.name` is defined per job as described [here](cloud-setting-a-custom-target-name). - * For all other attributes, the values are defined by the deployment connection. To check any of these values, head to `Environments` (via the hamburger menu), select the relevant deployment environment, and select settings. + * `target.name` is defined per job as described [here](/docs/build/custom-target-names). + * For all other attributes, the values are defined by the deployment connection. To check these values, click **Deploy** from the upper left and select **Environments**. Then, select the relevant deployment environment, and click **Settings**. * **dbt Cloud IDE:** The values are defined by your connection and credentials. To check any of these values, head to your account (via your profile image in the top right hand corner), and select the project under "Credentials". @@ -20,7 +20,7 @@ Some configs are shared between all adapters, while others are adapter-specific. | `target.profile_name` | jaffle_shop | The name of the active profile | | `target.name` | dev | Name of the active target | | `target.schema` | dbt_alice | Name of the dbt schema (or, dataset on BigQuery) | -| `target.type` | postgres | The active adapter being used. One of "postgres", "snowflake", "bigquery", "redshift" | +| `target.type` | postgres | The active adapter being used.
One of "postgres", "snowflake", "bigquery", "redshift", "databricks" | | `target.threads` | 4 | The number of threads in use by dbt | diff --git a/website/docs/reference/dbt-jinja-functions/this.md b/website/docs/reference/dbt-jinja-functions/this.md index a7f07f631be..66dbaafa4f2 100644 --- a/website/docs/reference/dbt-jinja-functions/this.md +++ b/website/docs/reference/dbt-jinja-functions/this.md @@ -4,7 +4,7 @@ id: "this" --- `this` is the database representation of the current model. It is useful when: -- Defining a `where` statement within [incremental models](configuring-incremental-models) +- Defining a `where` statement within [incremental models](/docs/build/incremental-models) - Using [pre or post hooks](pre-hook-post-hook) `this` is a [Relation](dbt-classes#relation), and as such, properties such as `{{ this.database }}` and `{{ this.schema }}` compile as expected. @@ -13,6 +13,10 @@ id: "this" ## Examples + + + + ### Grant permissions on a model in a post-hook @@ -26,6 +30,8 @@ models: + + ### Configuring incremental models diff --git a/website/docs/reference/dbt-jinja-functions/var.md b/website/docs/reference/dbt-jinja-functions/var.md index 519ffab643c..8956cebd6b4 100644 --- a/website/docs/reference/dbt-jinja-functions/var.md +++ b/website/docs/reference/dbt-jinja-functions/var.md @@ -28,7 +28,7 @@ Vars supplied to package_name.my_model = { ``` To define a variable in your project, add the `vars:` config to your `dbt_project.yml` file. -See the docs on [using variables](using-variables) for more information on +See the docs on [using variables](/docs/build/project-variables) for more information on defining variables in your dbt project. diff --git a/website/docs/reference/dbt-jinja-functions/zip.md b/website/docs/reference/dbt-jinja-functions/zip.md new file mode 100644 index 00000000000..78c92e18925 --- /dev/null +++ b/website/docs/reference/dbt-jinja-functions/zip.md @@ -0,0 +1,53 @@ +--- +title: "zip" +id: "zip" +--- + +### zip + +The `zip` context method can be used to used to return an iterator of tuples, where the i-th tuple contains the i-th element from each of the argument iterables. ([Python docs](https://docs.python.org/3/library/functions.html#zip)) + :param + :param + +__Args__: +- `*args`: Any number of iterables +- `default`: A default value to return if `*args` is not iterable + +### Usage + +``` +{% set my_list_a = [1, 2] %} +{% set my_list_b = ['alice', 'bob'] %} +{% set my_zip = zip(my_list_a, my_list_b) | list %} +{% do log(my_zip) %} {# [(1, 'alice'), (2, 'bob')] #} +``` + +``` +{% set my_list_a = 12 %} +{% set my_list_b = ['alice', 'bob'] %} +{% set my_zip = zip(my_list_a, my_list_b, default = []) | list %} +{% do log(my_zip) %} {# [] #} +``` + +### zip_strict + +The `zip_strict` context method can be used to used to return an iterator of tuples, just like `zip`. The difference to the `zip` context method is that the `zip_strict` method will raise an exception on a `TypeError`, if one of the provided values is not a valid iterable. + +__Args__: +- `value`: The iterable to convert (e.g. a list) + +``` +{% set my_list_a = [1, 2] %} +{% set my_list_b = ['alice', 'bob'] %} +{% set my_zip = zip_strict(my_list_a, my_list_b) | list %} +{% do log(my_zip) %} {# [(1, 'alice'), (2, 'bob')] #} +``` + +``` +{% set my_list_a = 12 %} +{% set my_list_b = ['alice', 'bob'] %} +{% set my_zip = zip_strict(my_list_a, my_list_b) %} + +Compilation Error in ... (...) 
'int' object is not iterable +``` diff --git a/website/docs/reference/dbt_project.yml.md b/website/docs/reference/dbt_project.yml.md index 2dfe76f2e6c..65855cc8c1c 100644 --- a/website/docs/reference/dbt_project.yml.md +++ b/website/docs/reference/dbt_project.yml.md @@ -62,7 +62,7 @@ tests: [](test-configs) vars: - [](using-variables) + [](/docs/build/project-variables) [on-run-start](project-configs/on-run-start-on-run-end): sql-statement | [sql-statement] [on-run-end](project-configs/on-run-start-on-run-end): sql-statement | [sql-statement] diff --git a/website/docs/reference/dbtignore.md b/website/docs/reference/dbtignore.md new file mode 100644 index 00000000000..268ac855d3a --- /dev/null +++ b/website/docs/reference/dbtignore.md @@ -0,0 +1,25 @@ +--- +title: .dbtignore +--- + +You can create a `.dbtignore` file in the root of your [dbt project](projects) to specify files that should be **entirely** ignored by dbt. The file behaves like a [`.gitignore` file, using the same syntax](https://git-scm.com/docs/gitignore). Files and subdirectories matching the pattern will not be read, parsed, or otherwise detected by dbt—as if they didn't exist. + +**Examples** + + + +```md +# .dbtignore + +# ignore individual .py files +not-a-dbt-model.py +another-non-dbt-model.py + +# ignore all .py files +**.py + +# ignore all .py files with "codegen" in the filename +*codegen*.py +``` + + diff --git a/website/docs/reference/events-logging.md b/website/docs/reference/events-logging.md index ab4fe853eb0..4e643e13b01 100644 --- a/website/docs/reference/events-logging.md +++ b/website/docs/reference/events-logging.md @@ -34,7 +34,7 @@ With every task that dbt performs, it generates events. It records those events When `json` [log formatting](global-configs#log-formatting) is enabled, dbt will produce rich, structured log information which can be piped into monitoring tools for analysis, or to power applications with dbt metadata in real time. -Each log line will have the following JSON properties: +Each log line will have the following JSON properties: | Field | Description | |-------------|---------------| | [`invocation_id`](invocation_id) | A unique identifier for this invocation of dbt | | `level` | A string representation of the log level (`debug`, `info`, `warn`, `error`) | | `log_version` | Integer indicating version | -| `msg` | The human-friendly log message. **Note**: This message is not intended for machine consumption. Log messages are subject to change in future versions of dbt, and those changes may or may not coincide with a change in `log_version`. | +| `msg` | The human-friendly log message. **Note**: This message is not intended for machine consumption. Log messages are subject to change in future versions of dbt, and those changes may or may not coincide with a change in `log_version`.
| | `node_info` | If applicable, a dictionary of human- and machine-friendly information about a currently running resource | | `pid` | The process ID for the running dbt invocation which produced this log message | -| `thread_name` | The thread in which the log message was produced, helpful for tracking queries when dbt is run with ltiple threads | +| `thread_name` | The thread in which the log message was produced, helpful for tracking queries when dbt is run with multiple threads | | `ts` | When the log line was printed | | `type` | Always `log_line` | @@ -90,7 +90,7 @@ If available, `node_info` will include: "resource_type": "model", "type": "node_status", "unique_id": "model.jaffle_shop.name_list" - }, + }, "pid": 81915, "thread_name": "Thread-4", "ts": "2021-12-02T21:47:03.480384Z", @@ -100,8 +100,6 @@ If available, `node_info` will include: ## Python interface -**Be warned:** While dbt-core v1 represents a significant step forward in the stability of the core framework, dbt-core's [python API](dbt-api) is still unstable and liable to change, with the exception of a few specific interfaces. - `dbt-core` makes available a full history of events fired during an invocation, in the form of an `EVENT_HISTORY` object: ```python diff --git a/website/docs/reference/exposure-properties.md b/website/docs/reference/exposure-properties.md index 6593caf3b9e..cee5271ac26 100644 --- a/website/docs/reference/exposure-properties.md +++ b/website/docs/reference/exposure-properties.md @@ -17,13 +17,19 @@ Exposures are defined in `.yml` files nested under an `exposures:` key. You may You can name these files `whatever_you_want.yml`, and nest them arbitrarily deeply in subfolders within the `models/` directory. + + +Exposure names must contain only letters, numbers, and underscores (no spaces or special characters). For a short human-friendly name with title casing, spaces, and special characters, use the `label` property. + + + ```yml version: 2 exposures: - - name: + - name: [description](description): type: {dashboard, notebook, analysis, ml, application} url: @@ -38,6 +44,11 @@ exposures: - ref('model') - ref('seed') - source('name', 'table') + + # added in dbt Core v1.3 + label: "Human-Friendly Name for this Exposure!" + [config](resource-properties/config): + enabled: true | false - name: ... # declare properties of additional exposures ``` @@ -52,6 +63,7 @@ exposures: exposures: - name: weekly_jaffle_metrics + label: Jaffles by the Week # optional, new in dbt Core v1.3 type: dashboard # required maturity: high # optional url: https://bi.tool/dashboards/1 # optional diff --git a/website/docs/reference/global-configs.md b/website/docs/reference/global-configs.md index 5e130093bf5..4afc5cf09b1 100644 --- a/website/docs/reference/global-configs.md +++ b/website/docs/reference/global-configs.md @@ -5,13 +5,11 @@ id: "global-configs" ## About Global Configs -Global configs enable you to fine-tune how dbt runs projects on your machine—whether your personal laptop, an orchestration tool running remotely, or (in some cases) dbt Cloud. They differ from [project configs](reference/dbt_project.yml) and [resource configs](reference/configs-and-properties), which tell dbt _what_ to run. +Global configs enable you to fine-tune _how_ dbt runs projects on your machine—whether your personal laptop, an orchestration tool running remotely, or (in some cases) dbt Cloud. 
In general, they differ from most [project configs](reference/dbt_project.yml) and [resource configs](reference/configs-and-properties), which tell dbt _what_ to run. -Global configs control things like the visual output of logs, the manner in which dbt parses your project, and what to do when dbt finds a version mismatch or a failing model. +Global configs control things like the visual output of logs, the manner in which dbt parses your project, and what to do when dbt finds a version mismatch or a failing model. These configs are "global" because they are available for all dbt commands, and because they can be set for all projects running on the same machine or in the same environment. -These configs are "global" because they are available for all dbt commands, and because they apply across all projects run on the same machine. - -Starting in v1.0, you can set global configs in three places. When all three are set, command line flags take precedence, then environment variables, and last profile configs. +Starting in v1.0, you can set global configs in three places. When all three are set, command line flags take precedence, then environment variables, and last yaml configs (usually `profiles.yml`). ## Command line flags @@ -88,9 +86,9 @@ $ dbt run -## Profile (or user) configurations +## Yaml configurations -You can set profile (or user) configurations in the `config:` block of `profiles.yml`. You would use the profile config to set defaults for all projects running on your local machine. +For most global configurations, you can set "user profile" configurations in the `config:` block of `profiles.yml`. This style of configuration sets default values for all projects using this profile directory—usually, all projects running on your local machine. @@ -103,6 +101,12 @@ config: + + +The exception: Some global configurations are actually set in `dbt_project.yml`, instead of `profiles.yml`, because they control where dbt places logs and artifacts. Those file paths are always relative to the location of `dbt_project.yml`. For more details, see ["Log and target paths"](#log-and-target-paths) below. + + + ### Cache database objects for selected resource @@ -182,7 +186,7 @@ config: ### Failing fast -Supply the `-x` or `--fail-fast` flag to `dbt run` to make dbt exit immediately if a single resource fails to build. If other models are in-progress when the first model fails, then dbt will terminate the connections for these still-running models. +Supply the `-x` or `--fail-fast` flag to `dbt run` to make dbt exit immediately if a single resource fails to build. If other models are in-progress when the first model fails, then dbt will terminate the connections for these still-running models. For example, you can select four models to run, but if a failure occurs in the first model, the failure will prevent other models from running: @@ -210,7 +214,7 @@ FailFast Error in model model_1 (models/model_1.sql) ### Log Formatting -The `LOG_FORMAT` config specifies how dbt's logs should be formatted. If the value of this config is `json`, dbt will output fully structured logs in JSON format; otherwise, it will output text-formatted logs that are sparser for the CLI and more detailed in `logs/dbt.log`. +The `LOG_FORMAT` config specifies how dbt's logs should be formatted. If the value of this config is `json`, dbt will output fully structured logs in JSON format; otherwise, it will output text-formatted logs that are sparser for the CLI and more detailed in `logs/dbt.log`.
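To make this concrete, here is a sketch of enabling structured logging for a single invocation. The log line shown is illustrative (the field values are invented); the actual properties are the ones listed in the events and logging reference above.

```
$ dbt --log-format json run
{"code": "Z023", "invocation_id": "f1c07e70-...", "level": "info", "log_version": 2, "msg": "Concurrency: 4 threads (target='dev')", "pid": 81915, "thread_name": "MainThread", "ts": "2021-12-02T21:47:03.480384Z", "type": "log_line"}
```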
@@ -269,11 +273,32 @@ config: + + +### Log and target paths + +By default, dbt will write logs to a directory named `logs/`, and all other artifacts to a directory named `target/`. Both of those directories are located relative to `dbt_project.yml` of the active project—that is, the root directory from which dbt is run. + +Just like other global configs, it is possible to override these values for your environment or invocation by using CLI flags (`--target-path`, `--log-path`) or environment variables (`DBT_TARGET_PATH`, `DBT_LOG_PATH`). + +Unlike the other global configs documented on this page, which can be set in `profiles.yml`, the project paths are configured in `dbt_project.yml`. This is because `profiles.yml` and `dbt_project.yml` are most often located in separate locations on your machine, and the log and artifact paths are always defined relative to the location of `dbt_project.yml`. + + + +```yaml +[target-path](target-path): "other-target" +[log-path](log-path): "other-logs" +``` + + + + + ### Send anonymous usage stats -We want to build the best version of dbt possible, and a crucial part of that is understanding how users work with dbt. To this end, we've added some simple event tracking to dbt (using Snowplow). We do not track credentials, model contents or model names (we consider these private, and frankly none of our business). +We want to build the best version of dbt possible, and a crucial part of that is understanding how users work with dbt. To this end, we've added some simple event tracking to dbt (using Snowplow). We do not track credentials, raw model contents or model names (we consider these private, and frankly none of our business). -Usage statistics are fired when dbt is invoked and when models are run. These events contain basic platform information (OS + python version). You can see all the event definitions in [`tracking.py`](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/tracking.py). +Usage statistics are fired when dbt is invoked and when models are run. These events contain basic platform information (OS + python version) and metadata such as whether the invocation succeeded, how long it took, an anonymized hash key representing the raw model content, and the number of nodes that were run. You can see all the event definitions in [`tracking.py`](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/tracking.py). By default this is turned on – you can opt out of event tracking at any time by adding the following to your `profiles.yml` file: @@ -282,7 +307,7 @@ config: send_anonymous_usage_stats: False ``` -You can also use the DO_NOT_TRACK environmental variable to enable or disable sending anonymous data. For more information, see [Environmental variables](/dbt-cloud/using-dbt-cloud/cloud-environment-variables). +You can also use the DO_NOT_TRACK environmental variable to enable or disable sending anonymous data. For more information, see [Environmental variables](/docs/build/environment-variables).
`DO_NOT_TRACK=1` is the same as `DBT_SEND_ANONYMOUS_USAGE_STATS=False` `DO_NOT_TRACK=0` is the same as `DBT_SEND_ANONYMOUS_USAGE_STATS=True` diff --git a/website/docs/reference/macro-properties.md b/website/docs/reference/macro-properties.md index 19a1b4e4890..0f3e8833bde 100644 --- a/website/docs/reference/macro-properties.md +++ b/website/docs/reference/macro-properties.md @@ -14,7 +14,7 @@ version: 2 macros: - name: [description](description): - [docs](resource-properties/docs): + [docs](/reference/resource-configs/docs): show: true | false arguments: - name: diff --git a/website/docs/reference/model-configs.md b/website/docs/reference/model-configs.md index a38e43cd0c1..87027448cfe 100644 --- a/website/docs/reference/model-configs.md +++ b/website/docs/reference/model-configs.md @@ -7,7 +7,7 @@ title: Model configurations ## Related documentation -* [Models](building-models) +* [Models](/docs/build/models) * [`run` command](run) ## Available configurations @@ -108,6 +108,7 @@ models: [+](plus-prefix)[persist_docs](persist_docs): [+](plus-prefix)[full_refresh](full_refresh): [+](plus-prefix)[meta](meta): {} + [+](plus-prefix)[grants](grants): {} ``` @@ -136,7 +137,7 @@ models: [persist_docs](persist_docs): [full_refresh](full_refresh): [meta](meta): {} - + [grants](grants): {} ``` @@ -161,6 +162,7 @@ models: [alias](resource-configs/alias)="", [persist_docs](persist_docs)={}, - [meta](meta)={} + [meta](meta)={}, + [grants](grants)={} ) }} ``` diff --git a/website/docs/reference/model-properties.md b/website/docs/reference/model-properties.md index 5375cbd3bed..1e1bb5e7ecf 100644 --- a/website/docs/reference/model-properties.md +++ b/website/docs/reference/model-properties.md @@ -18,7 +18,7 @@ version: 2 models: - [name](model_name): [description](description): - [docs](resource-properties/docs): + [docs](/reference/resource-configs/docs): show: true | false [config](resource-properties/config): [](model-configs): diff --git a/website/docs/reference/node-selection/defer.md b/website/docs/reference/node-selection/defer.md index f2361d587f0..f5a6d5e5c0c 100644 --- a/website/docs/reference/node-selection/defer.md +++ b/website/docs/reference/node-selection/defer.md @@ -9,9 +9,9 @@ title: "Defer" -**N.B.** Deferral is a powerful, complex feature that enables compelling workflows. We reserve the right to change the name and syntax in a future version of dbt to make the behavior clearer and more intuitive. For details, see [dbt#2968](https://github.com/fishtown-analytics/dbt/issues/2968). +**N.B.** Deferral is a powerful, complex feature that enables compelling workflows. We reserve the right to change the name and syntax in a future version of dbt to make the behavior clearer and more intuitive. For details, see [dbt#2968](https://github.com/dbt-labs/dbt-core/issues/2968). -Defer is a powerful feature that makes it possible to run a subset of models or tests in a [sandbox environment](managing-environments), without having to first build their upstream parents. This can save time and computational resources when you want to test a small number of models in a large project. +Defer is a powerful feature that makes it possible to run a subset of models or tests in a [sandbox environment](/docs/collaborate/environments), without having to first build their upstream parents. This can save time and computational resources when you want to test a small number of models in a large project. Defer requires that a manifest from a previous dbt invocation be passed to the `--state` flag or env var.
Together with the `state:` selection method, these features enable "Slim CI". Read more about [state](understanding-state). ### Usage @@ -46,7 +46,7 @@ When using defer, you may be selecting from production datasets, development dat - if you apply env-specific limits in dev but not prod, as you may end up selecting more data than you expect - when executing tests that depend on multiple parents (e.g. `relationships`), since you're testing "across" environments -Deferral requires both `--defer` and `--state` to be set, either by passing flags explicitly or by setting environment variables (`DBT_DEFER_TO_STATE` and `DBT_ARTIFACT_STATE_PATH`). If you use dbt Cloud, read about [how to set up CI jobs](cloud-enabling-continuous-integration-with-github). +Deferral requires both `--defer` and `--state` to be set, either by passing flags explicitly or by setting environment variables (`DBT_DEFER_TO_STATE` and `DBT_ARTIFACT_STATE_PATH`). If you use dbt Cloud, read about [how to set up CI jobs](/docs/deploy/cloud-ci-job). ### Example diff --git a/website/docs/reference/node-selection/methods.md b/website/docs/reference/node-selection/methods.md index 6fdf651452c..43c0fe1b628 100644 --- a/website/docs/reference/node-selection/methods.md +++ b/website/docs/reference/node-selection/methods.md @@ -24,7 +24,7 @@ The `tag:` method is used to select models that match a specified [tag](resource ### The "source" method -The `source` method is used to select models that select from a specified [source](using-sources). Use in conjunction with the `+` operator. +The `source` method is used to select models that select from a specified [source](/docs/build/sources#using-sources). Use in conjunction with the `+` operator. @@ -73,6 +73,19 @@ selectors unambiguous. + + +### The "file" method +The `file` method can be used to select a model by its filename, including the file extension (`.sql`). + +```bash +# These are equivalent +dbt run --select some_model.sql +dbt run --select some_model +``` + + + ### The "package" method New in v0.18.0 @@ -125,6 +138,32 @@ The `config` method is used to select models that match a specified [node config + + +While most config values are strings, you can also use the `config` method to match boolean configs, dictionary keys, and values in lists. + +For example, given a model with the following configurations: +``` +{{ config( + materialized = 'incremental', + unique_key = ['column_a', 'column_b'], + grants = {'select': ['reporter', 'analysts']}, + transient = true +) }} + +select ... +``` + + You can select using any of the following: +```bash +$ dbt ls -s config.materialized:incremental +$ dbt ls -s config.unique_key:column_a +$ dbt ls -s config.grants.select:reporter +$ dbt ls -s config.transient:true +``` + + + ### The "test_type" method @@ -157,7 +196,7 @@ The `test_type` method is used to select tests based on their type, `singular` o The `test_name` method is used to select tests based on the name of the generic test that defines it. For more information about how generic tests are defined, read about -[tests](building-a-dbt-project/tests). +[tests](/docs/build/tests). 
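For example, assuming a project that uses the built-in `unique` generic test plus a custom generic test named `is_even` (the custom name here is illustrative), selecting tests by the name of the generic test that defines them looks like this:

```bash
$ dbt test --select test_name:unique
$ dbt test --select test_name:is_even
```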
@@ -293,4 +332,4 @@ After issuing one of the above commands, you can reference the source freshness $ dbt source freshness # must be run again to compare current to previous state $ dbt build --select source_status:fresher+ --state path/to/prod/artifacts ``` - \ No newline at end of file + diff --git a/website/docs/reference/node-selection/syntax.md b/website/docs/reference/node-selection/syntax.md index b95dfdaacfa..5787ca3355b 100644 --- a/website/docs/reference/node-selection/syntax.md +++ b/website/docs/reference/node-selection/syntax.md @@ -114,3 +114,6 @@ dbt supports a shorthand language for defining subsets of nodes. This language u As your selection logic gets more complex, and becomes unwieldly to type out as command-line arguments, consider using a [yaml selector](yaml-selectors). You can use a predefined definition with the `--selector` flag. Note that when you're using `--selector`, most other flags (namely `--select` and `--exclude`) will be ignored. + + + diff --git a/website/docs/reference/node-selection/yaml-selectors.md b/website/docs/reference/node-selection/yaml-selectors.md index 0089aa6d13a..eeaa7be7267 100644 --- a/website/docs/reference/node-selection/yaml-selectors.md +++ b/website/docs/reference/node-selection/yaml-selectors.md @@ -35,7 +35,7 @@ selectors: ## Definitions Each `definition` is comprised of one or more arguments, which can be one of the following: -* **CLI-style:** strings, representing CLI-style) arguments +* **CLI-style:** strings, representing CLI-style arguments * **Key-value:** pairs in the form `method: value` * **Full YAML:** fully specified dictionaries with items for `method`, `value`, operator-equivalent keywords, and support for `exclude` @@ -124,10 +124,10 @@ As a general rule, dbt will indirectly select _all_ tests if they touch _any_ re - union: - method: fqn value: model_a - greedy: eager # default: will include all tests that touch model_a + indirect_selection: eager # default: will include all tests that touch model_a - method: fqn value: model_b - greedy: cautious # will not include tests touching model_b + indirect_selection: cautious # will not include tests touching model_b # if they have other unselected parents ``` @@ -261,3 +261,31 @@ selectors: default: "{{ target.name == 'prod' | as_bool }}" definition: ... ``` + + + +### Selector inheritance + +Selectors can reuse and extend definitions from other selectors, via the `selector` method. + +```yml +selectors: + - name: foo_and_bar + definition: + intersection: + - tag: foo + - tag: bar + + - name: foo_bar_less_buzz + definition: + intersection: + # reuse the definition from above + - method: selector + value: foo_and_bar + # with a modification! + - exclude: + - method: tag + value: buzz +``` + + diff --git a/website/docs/reference/parsing.md b/website/docs/reference/parsing.md index f6b3f72461d..256c01dd094 100644 --- a/website/docs/reference/parsing.md +++ b/website/docs/reference/parsing.md @@ -30,7 +30,7 @@ python -c "from yaml import CLoader" ## Partial parsing -After parsing your project, dbt stores an internal project manifest in a file called `partial_parse.msgpack`. When partial parsing is enabled, dbt will use that internal manifest to determine which files have been changed (if any) since it last parsed the project. Then, it will _only_ parse the changed files, or files related those changes. +After parsing your project, dbt stores an internal project manifest in a file called `partial_parse.msgpack`. 
When partial parsing is enabled, dbt will use that internal manifest to determine which files have been changed (if any) since it last parsed the project. Then, it will _only_ parse the changed files, or files related to those changes. Starting in v1.0, partial parsing is **on** by default. In development, partial parsing can significantly reduce the time spent waiting at the start of a run, which translates to faster dev cycles and iteration. @@ -56,7 +56,7 @@ If you ever get into a bad state, you can disable partial parsing and trigger a ## Static parser -At parse time, dbt needs to extract the contents of `ref()`, `source()`, and `config()` from all models in the project. Traditionally, dbt has extracted those values by rendering the Jinja in every model file, which can be slow. In v0.20, we introduced a new way to statically analyze model files, leveraging [`tree-sitter`](https://github.com/tree-sitter/tree-sitter), which we're calling an "experimental parser". You can see the code for an initial Jinja2 grammar [here](https://github.com/fishtown-analytics/tree-sitter-jinja2). +At parse time, dbt needs to extract the contents of `ref()`, `source()`, and `config()` from all models in the project. Traditionally, dbt has extracted those values by rendering the Jinja in every model file, which can be slow. In v0.20, we introduced a new way to statically analyze model files, leveraging [`tree-sitter`](https://github.com/tree-sitter/tree-sitter), which we're calling an "experimental parser". You can see the code for an initial Jinja2 grammar [here](https://github.com/dbt-labs/tree-sitter-jinja2). Starting in v1.0, the experimental parser is **on** by default. We believe it can offer *some* speedup to 95% of projects. You may optionally turn it off using the [`STATIC_PARSER` global config](global-configs#static-parser). diff --git a/website/docs/reference/profiles.yml.md b/website/docs/reference/profiles.yml.md index 4030ed6a0e5..96f038f6782 100644 --- a/website/docs/reference/profiles.yml.md +++ b/website/docs/reference/profiles.yml.md @@ -4,7 +4,7 @@ If you're using the dbt CLI, you'll need to set up a `profiles.yml` file. -You can learn more about this in the article on [Connecting to your warehouse](configure-your-profile). +You can learn more about this in the article on [Connecting to your warehouse](/docs/get-started/connection-profiles). This article lists the parts of your `profiles.yml` which are _not_ database specific. Check out the article for your database for exact connection details. @@ -26,7 +26,7 @@ This article lists the parts of your `profiles.yml` which are _not_ database spe [static_parser](global-configs#static-parser): : - target: + target: # this is the default target outputs: : type: diff --git a/website/docs/reference/project-configs/asset-paths.md b/website/docs/reference/project-configs/asset-paths.md index 3621cc133d6..401a88d86c7 100644 --- a/website/docs/reference/project-configs/asset-paths.md +++ b/website/docs/reference/project-configs/asset-paths.md @@ -16,7 +16,7 @@ Optionally specify a custom list of directories to copy to the `target` director -* `v0.18.0`: This configuration was introduced — see the [migration guide for 0.18.0](upgrading-to-0-18-0) for more details. +* `v0.18.0`: This configuration was introduced — see the [migration guide](/guides/migration/versions) for more details. 
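As a minimal sketch of the `asset-paths` config described above (the `assets` directory name is an assumption), copying a folder of images to the `target` directory so they can be referenced in project documentation:

```yml
asset-paths: ["assets"]
```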
diff --git a/website/docs/reference/project-configs/config-version.md b/website/docs/reference/project-configs/config-version.md index b3ee4f51590..60e787513a5 100644 --- a/website/docs/reference/project-configs/config-version.md +++ b/website/docs/reference/project-configs/config-version.md @@ -15,7 +15,7 @@ Specify your `dbt_project.yml` as using the v2 structure. -* `v0.17.0`: This configuration was introduced — see the [migration guide for 0.17.0](upgrading-to-0-17-0) for more details. +* `v0.17.0`: This configuration was introduced — see the [migration guide](/guides/migration/versions) for more details. diff --git a/website/docs/reference/project-configs/dispatch-config.md b/website/docs/reference/project-configs/dispatch-config.md index 303b5f6b820..6cb8060949c 100644 --- a/website/docs/reference/project-configs/dispatch-config.md +++ b/website/docs/reference/project-configs/dispatch-config.md @@ -18,34 +18,33 @@ dispatch: ## Definition -Optionally override the [dispatch](dispatch) search locations for macros in certain namespaces. +Optionally override the [dispatch](dispatch) search locations for macros in certain namespaces. If not specified, `dispatch` will look in your root project _first_, by default, and then look for implementations in the package named by `macro_namespace`. ## Examples -I've reimplemented certain macros from the `dbt_utils` package, and I want my versions to take precedence. Otherwise, fall back to the versions in `dbt_utils`. + +I want to "shim" the `dbt_utils` package with the `spark_utils` compatibility package. ```yml dispatch: - macro_namespace: dbt_utils - search_order: - - macro_namespace: packagename - search_order: [packagename] + search_order: ['spark_utils', 'dbt_utils'] ``` -I want to "shim" the `dbt_utils` package with the `spark_utils` compatibility package. +I've reimplemented certain macros from the `dbt_utils` package in my root project (`'my_root_project'`), and I want my versions to take precedence. Otherwise, fall back to the versions in `dbt_utils`. + +_Note: As of v0.21.1, this is the default behavior. You may optionally choose to express that search order explicitly as:_ ```yml dispatch: - - macro_namespace: packagename - search_order: [packagename] - - macro_namespace: packagename - search_order: [packagename] + - macro_namespace: dbt_utils + search_order: ['my_root_project', 'dbt_utils'] ``` diff --git a/website/docs/reference/project-configs/log-path.md b/website/docs/reference/project-configs/log-path.md index 5dc1bf69806..64b5d73ce85 100644 --- a/website/docs/reference/project-configs/log-path.md +++ b/website/docs/reference/project-configs/log-path.md @@ -16,6 +16,18 @@ Optionally specify a custom directory where dbt will write logs. ## Default By default, dbt will write to the `logs` directory, i.e. `log-path: logs` + +## Configuration + +In the manner of a ["global" config](global-configs), the log path can be set in three places: +1. `--log-path` CLI flag +2. `DBT_LOG_PATH` environment variable +3. 
`log-path` in `dbt_project.yml` + +The precedence order is: CLI flag > env var > `dbt_project.yml` + + + ## Examples ### Write logs to a subdirectory named `dbt_logs` instead of `logs` diff --git a/website/docs/reference/project-configs/model-paths.md b/website/docs/reference/project-configs/model-paths.md index 1e429da5360..2129747af27 100644 --- a/website/docs/reference/project-configs/model-paths.md +++ b/website/docs/reference/project-configs/model-paths.md @@ -17,7 +17,7 @@ model-paths: [directorypath] ## Definition -Optionally specify a custom list of directories where [models](building-models) and [sources](using-sources) are located. +Optionally specify a custom list of directories where [models](/docs/build/models) and [sources](/docs/build/sources) are located. ## Default By default, dbt will search for models and sources in the `models` directory, i.e. `model-paths: ["models"]` diff --git a/website/docs/reference/project-configs/on-run-start-on-run-end.md b/website/docs/reference/project-configs/on-run-start-on-run-end.md index 7456c0fb845..091122baa88 100644 --- a/website/docs/reference/project-configs/on-run-start-on-run-end.md +++ b/website/docs/reference/project-configs/on-run-start-on-run-end.md @@ -29,6 +29,11 @@ A SQL statement (or list of SQL statements) to be run at the start, or end, of t * The `on-run-end` hook has additional jinja variables available in the context — check out the [docs](on-run-end-context). ## Examples + + + + + ### Grant privileges at the end of a run @@ -53,6 +58,8 @@ on-run-end: + + ### Grant privileges on all schemas that dbt uses at the end of a run This leverages the [schemas](schemas) variable that is only available in an `on-run-end` hook. diff --git a/website/docs/reference/project-configs/packages-install-path.md b/website/docs/reference/project-configs/packages-install-path.md index 4798a02831a..3ab1869ad48 100644 --- a/website/docs/reference/project-configs/packages-install-path.md +++ b/website/docs/reference/project-configs/packages-install-path.md @@ -18,7 +18,7 @@ packages-install-path: directorypath ## Definition -Optionally specify a custom directory where [packages](package-management) are installed when you run the `dbt deps` [command](deps). Note that this directory is usually git-ignored. +Optionally specify a custom directory where [packages](/docs/build/packages) are installed when you run the `dbt deps` [command](deps). Note that this directory is usually git-ignored. ## Default By default, dbt will install packages in the `dbt_packages` directory, i.e. `packages-install-path: dbt_packages` diff --git a/website/docs/reference/project-configs/profile.md b/website/docs/reference/project-configs/profile.md index de11b520921..9ea4cd55d63 100644 --- a/website/docs/reference/project-configs/profile.md +++ b/website/docs/reference/project-configs/profile.md @@ -10,15 +10,15 @@ profile: string ## Definition -The profile your dbt project should use to connect to your data warehouse. +The profile your dbt project should use to connect to your data warehouse. * If you are developing in dbt Cloud: This configuration is optional * If you are developing locally: This configuration is required, unless a command-line option (i.e. `--profile`) is supplied.
## Related guides
-* [Connecting to your warehouse](dbt-cli/configure-your-profile)
+* [Connecting to your warehouse using the command line](/docs/get-started/connection-profiles#connecting-to-your-warehouse-using-the-command-line)
 ## Recommendation
-Often an organization has only one data warehouse, so it is sensible to use your organization's name as a profile name, in `snake_case`. For example:
+Often an organization has only one data warehouse, so it is sensible to use your organization's name as a profile name, in `snake_case`. For example:
 * `profile: acme`
 * `profile: jaffle_shop`
diff --git a/website/docs/reference/project-configs/query-comment.md b/website/docs/reference/project-configs/query-comment.md
index 9738ce9fc61..9b9eae99a31 100644
--- a/website/docs/reference/project-configs/query-comment.md
+++ b/website/docs/reference/project-configs/query-comment.md
@@ -39,7 +39,7 @@ The `query-comment` configuration can also call a macro that returns a string.
 ## Default
-By default, dbt will insert a JSON comment at the top of your query containing the information including the dbt version, profile and target names, and node ids for the resources it runs. For example:
+By default, dbt will insert a comment at the top of your query containing information such as the dbt version, profile and target names, and node ids for the resources it runs. For example:
 ```sql
 /* {"app": "dbt", "dbt_version": "0.15.0rc2", "profile_name": "debug",
@@ -156,7 +156,7 @@ select ...
-If `query-comment.job-label` is set to true, dbt will include the query comment items, if a dictionary, or the comment string, as job labels on the query it executes. These will be included in addition to labels specified in the [BigQuery-specific config](bigquery-configs).
+If `query-comment.job-label` is set to true, dbt will include the query comment items, if a dictionary, or the comment string, as job labels on the query it executes. These will be included in addition to labels specified in the [BigQuery-specific config](/reference/project-configs/query-comment#bigquery-include-query-comment-items-as-job-labels).
@@ -168,8 +168,6 @@ query-comment:
-
-
 ### Append a custom comment
 The following example uses the dictionary syntax to append (rather than prepend) a comment that varies based on the configured `user` specified in the active dbt target.
diff --git a/website/docs/reference/project-configs/seed-paths.md b/website/docs/reference/project-configs/seed-paths.md
index 34048982321..92f7c5aa91f 100644
--- a/website/docs/reference/project-configs/seed-paths.md
+++ b/website/docs/reference/project-configs/seed-paths.md
@@ -18,11 +18,11 @@ seed-paths: [directorypath]
 ## Definition
-Optionally specify a custom list of directories where [seed](docs/building-a-dbt-project/seeds.md) files are located.
+Optionally specify a custom list of directories where [seed](/docs/build/seeds) files are located.
 ## Default
-By default, dbt expects analyses to be located in the `seeds` directory, i.e. `seed-paths: ["seeds"]`
+By default, dbt expects seeds to be located in the `seeds` directory, i.e.
`seed-paths: ["seeds"]` ## Examples ### Use a subdirectory named `custom_seeds` instead of `seeds` diff --git a/website/docs/reference/project-configs/target-path.md b/website/docs/reference/project-configs/target-path.md index 9bbd2c19de0..82956d9688d 100644 --- a/website/docs/reference/project-configs/target-path.md +++ b/website/docs/reference/project-configs/target-path.md @@ -17,6 +17,18 @@ Optionally specify a custom directory where compiled files (e.g. compiled models ## Default By default, dbt will write compiled files to the `target` directory, i.e. `target-path: target` + +## Configuration + +In the manner of a ["global" config](global-configs), the target path can be set in three places: +1. `--target-path` CLI flag +2. `DBT_TARGET_PATH` environment variable +3. `target-path` in `dbt_project.yml` + +The precedence order is: CLI flag > env var > `dbt_project.yml` + + + ## Examples ### Use a subdirectory named `compiled` for compiled files diff --git a/website/docs/reference/project-configs/test-paths.md b/website/docs/reference/project-configs/test-paths.md index 4d99774b5ab..eec360663d9 100644 --- a/website/docs/reference/project-configs/test-paths.md +++ b/website/docs/reference/project-configs/test-paths.md @@ -18,7 +18,7 @@ test-paths: [directorypath] ## Definition -Optionally specify a custom list of directories where [data tests](docs/building-a-dbt-project/tests#data-tests) are located. +Optionally specify a custom list of directories where [data tests](/docs/build/tests) are located. ## Default Without specifying this config, dbt will search for tests in the `tests` directory, i.e. `test-paths: ["tests"]`. Specifically, it will look for `.sql` files containing: diff --git a/website/docs/reference/project-configs/version.md b/website/docs/reference/project-configs/version.md index 31d91ad9c82..346f1f85fef 100644 --- a/website/docs/reference/project-configs/version.md +++ b/website/docs/reference/project-configs/version.md @@ -7,7 +7,7 @@ dbt projects have two distinct types of the `version` tags. This field has a dif ## `dbt_project.yml` versions -The version tag in a `dbt_project` file represents the version of your dbt project. Although **this is a required parameter**, it is not currently meaningfully used by dbt. The version must be in a [semantic version](https://semver.org/) format, e.g. `1.0.0`. For more on Core versions, see "[About dbt Core versions](/docs/core-versions)." +The version tag in a `dbt_project` file represents the version of your dbt project. Although **this is a required parameter**, it is not currently meaningfully used by dbt. The version must be in a [semantic version](https://semver.org/) format, e.g. `1.0.0`. For more on Core versions, see [About dbt Core versions](/docs/dbt-versions/core). ```yml @@ -29,7 +29,5 @@ For more on property files, see their general [documentation](reference/configs- ```yml version: 2 # Only 2 is accepted by current and recent versions of dbt. ``` - - - + diff --git a/website/docs/reference/resource-configs/alias.md b/website/docs/reference/resource-configs/alias.md index 0ba3482ea97..cdf832f643d 100644 --- a/website/docs/reference/resource-configs/alias.md +++ b/website/docs/reference/resource-configs/alias.md @@ -10,7 +10,7 @@ This is a work in progress document. While this configuration applies to multipl ## Definition -Optionally specify a custom alias for a [model](docs/building-a-dbt-project/building-models) or [seed](docs/building-a-dbt-project/seeds). 
+Optionally specify a custom alias for a [model](/docs/build/models) or [seed](/docs/build/seeds). When dbt creates a relation (/) in a database, it creates it as: `{{ database }}.{{ schema }}.{{ identifier }}`, e.g. `analytics.finance.payments` @@ -18,7 +18,7 @@ The standard behavior of dbt is: * If a custom alias is _not_ specified, the identifier of the relation is the resource name (i.e. the filename). * If a custom alias is specified, the identifier of the relation is the `{{ alias }}` value. -To learn more about changing the way that dbt generates a relation's `identifier`, read [Using Aliases](docs/building-a-dbt-project/building-models/using-custom-aliases.md). +To learn more about changing the way that dbt generates a relation's `identifier`, read [Using Aliases](/docs/build/custom-aliases). ## Usage diff --git a/website/docs/reference/resource-configs/azuresynapse-configs.md b/website/docs/reference/resource-configs/azuresynapse-configs.md new file mode 100644 index 00000000000..3281fd9627c --- /dev/null +++ b/website/docs/reference/resource-configs/azuresynapse-configs.md @@ -0,0 +1,72 @@ +--- +title: "Microsoft Azure Synapse DWH configurations" +id: "azuresynapse-configs" +--- + +All [configuration options for the Microsoft SQL Server adapter](mssql-configs) also apply to this adapter. + +Additionally, the configuration options below are available. + +### Indices and distributions + +The main index and the distribution type can be set for models that are materialized to tables. + + + + + + + +```sql +{{ + config( + index='HEAP', + dist='ROUND_ROBIN' + ) +}} + +select * +from ... +``` + + + + + + + + + +```yaml +models: + your_project_name: + materialized: view + staging: + materialized: table + index: HEAP +``` + + + + + + + +The following are the supported index types: + +* `CLUSTERED COLUMNSTORE INDEX` (default) +* `HEAP` +* `CLUSTERED INDEX (COLUMN_NAME)` +* `CLUSTERED COLUMNSTORE INDEX ORDER(COLUMN_NAME)` + +The following are the supported distribution types: + +* `ROUND_ROBIN` (default) +* `HASH(COLUMN_NAME)` +* `REPLICATE` diff --git a/website/docs/reference/resource-configs/bigquery-configs.md b/website/docs/reference/resource-configs/bigquery-configs.md index 2e829a5a566..357082f2ebb 100644 --- a/website/docs/reference/resource-configs/bigquery-configs.md +++ b/website/docs/reference/resource-configs/bigquery-configs.md @@ -126,7 +126,7 @@ as ( #### Partitioning with integer buckets If the `data_type` is specified as `int64`, then a `range` key must also -be provied in the `partition_by` dict. dbt will use the values provided in +be provided in the `partition_by` dict. dbt will use the values provided in the `range` dict to generate the partitioning clause for the table. views that it creates. These labels can be specified using the `labels` model config. The `labels` config can be provided in a model config, or in the `dbt_project.yml` file, as shown below. + +:::info Note +BigQuery requires that both key-value pair entries for labels have a maximum length of 63 characters. +::: **Configuring labels in a model file** @@ -376,7 +380,7 @@ Please note that in order for policy tags to take effect, [column-level `persist ## Merge behavior (incremental models) -The [`incremental_strategy` config](configuring-incremental-models#about-incremental_strategy) controls how dbt builds incremental models. dbt uses a [merge statement](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax) on BigQuery to refresh incremental tables. 
+The [`incremental_strategy` config](/docs/build/incremental-models#about-incremental_strategy) controls how dbt builds incremental models. dbt uses a [merge statement](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax) on BigQuery to refresh incremental tables.
 The `incremental_strategy` config can be set to one of two values:
 - `merge` (default)
@@ -442,7 +446,7 @@ declare dbt_partitions_for_replacement array;
     set (dbt_partitions_for_replacement) = (
         select as struct
             array_agg(distinct date(max_tstamp))
-        from `my_project`.`my_dataset`.`sessions`
+        from `my_project`.`my_dataset`.{{ model_name }}__dbt_tmp
     );
 /*
@@ -594,6 +598,7 @@ select ...
 ## Authorized Views
+
 New in v0.18.0
 If the `grant_access_to` config is specified for a model materialized as a
@@ -601,6 +606,8 @@ view, dbt will grant the view model access to select from the list of datasets
 provided. See [BQ docs on authorized views](https://cloud.google.com/bigquery/docs/share-access-views)
 for more details.
+
+
 ```yml
@@ -629,10 +636,8 @@ models:
-
-Views with this configuration will be able to select from objects in
-`project_1.dataset_1` and `project_2.dataset_2`, even when they are located
-elsewhere and queried by users who do not otherwise have
-access to `project_1.dataset_1` and `project_2.dataset_2`.
+Views with this configuration will be able to select from objects in `project_1.dataset_1` and `project_2.dataset_2`, even when they are located elsewhere and queried by users who do not otherwise have access to `project_1.dataset_1` and `project_2.dataset_2`.
 #### Limitations
+
 The `grant_access_to` config is not thread-safe when multiple views need to be authorized for the same dataset. The initial `dbt run` operation after a new `grant_access_to` config is added should therefore be executed in a single thread. Subsequent runs using the same configuration will not attempt to re-apply existing access grants, and can make use of multiple threads.
diff --git a/website/docs/reference/resource-configs/clickhouse-configs.md b/website/docs/reference/resource-configs/clickhouse-configs.md
new file mode 100644
index 00000000000..9057b6cc228
--- /dev/null
+++ b/website/docs/reference/resource-configs/clickhouse-configs.md
@@ -0,0 +1,223 @@
+---
+title: "ClickHouse configurations"
+id: "clickhouse-configs"
+---
+
+## Models
+
+| Type | Supported? | Details |
+|------|------------|---------|
+| view materialization | YES | Creates a [view](https://clickhouse.com/docs/en/sql-reference/table-functions/view/). |
+| table materialization | YES | Creates a [table](https://clickhouse.com/docs/en/operations/system-tables/tables/). See below for the list of supported engines. |
+| incremental materialization | YES | Creates a table if it doesn't exist, and then writes only updates to it. |
+
+### View Materialization
+
+A dbt model can be created as a [ClickHouse view](https://clickhouse.com/docs/en/sql-reference/table-functions/view/) and configured using the following syntax:
+
+```yaml
+models:
+  <resource-path>:
+    +materialized: view
+```
+
+```jinja
+{{ config(materialized = "view") }}
+```
+
+### Table Materialization
+
+A dbt model can be created as a [ClickHouse table](https://clickhouse.com/docs/en/operations/system-tables/tables/) and configured using the following syntax:
+
+```yaml
+models:
+  <resource-path>:
+    +materialized: table
+    +order_by: [ <column-name>, ... ]
+    +engine: <engine-type>
+    +partition_by: [ <column-name>, ... ]
+```
+
+```jinja
+{{ config(
+    materialized = "table",
+    engine = "<engine-type>",
+    order_by = [ "<column-name>", ... ],
+    partition_by = [ "<column-name>", ... ]
+) }}
+```
+
+#### Table Configuration
+
+| Option | Description | Required? |
+|--------|-------------|-----------|
+| `materialized` | How the model will be materialized into ClickHouse. Must be `table` to create a table model. | Required |
+| `engine` | The table engine to use when creating tables. See list of supported engines below. | Optional (default: `MergeTree()`) |
+| `order_by` | A tuple of column names or arbitrary expressions. This allows you to create a small sparse index that helps find data faster. | Optional (default: `tuple()`) |
+| `partition_by` | A partition is a logical combination of records in a table by a specified criterion. The partition key can be any expression from the table columns. | Optional |
+
+### Incremental Materialization
+
+A table model is reconstructed on each dbt run. This can be infeasible and extremely costly for larger result sets or complex transformations. To address this challenge and reduce the build time, a dbt model can be created as an incremental ClickHouse table and is configured using the following syntax:
+
+```yaml
+models:
+  <resource-path>:
+    +materialized: incremental
+    +order_by: [ <column-name>, ... ]
+    +engine: <engine-type>
+    +partition_by: [ <column-name>, ... ]
+    +unique_key: [ <column-name>, ... ]
+    +inserts_only: [ True|False ]
+```
+
+```jinja
+{{ config(
+    materialized = "incremental",
+    engine = "<engine-type>",
+    order_by = [ "<column-name>", ... ],
+    partition_by = [ "<column-name>", ... ],
+    unique_key = [ "<column-name>", ... ],
+    inserts_only = [ True|False ]
+) }}
+```
+
+#### Incremental Table Configuration
+
+| Option | Description | Required? |
+|--------|-------------|-----------|
+| `materialized` | How the model will be materialized into ClickHouse. Must be `incremental` to create an incremental model. | Required |
+| `unique_key` | A tuple of column names that uniquely identify rows. For more details on uniqueness constraints, see [here](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/configuring-incremental-models#defining-a-uniqueness-constraint-optional). | Required. If not provided, altered rows will be added twice to the incremental table. |
+| `engine` | The table engine to use when creating tables. See list of supported engines below. | Optional (default: `MergeTree()`) |
+| `order_by` | A tuple of column names or arbitrary expressions. This allows you to create a small sparse index that helps find data faster. | Optional (default: `tuple()`) |
+| `partition_by` | A partition is a logical combination of records in a table by a specified criterion. The partition key can be any expression from the table columns. | Optional |
+| `inserts_only` | If set to True, incremental updates will be inserted directly into the target incremental table without creating an intermediate table. Read more about this configuration in our [doc](https://clickhouse.com/docs/en/integrations/dbt/dbt-incremental-model#inserts-only-mode). | Optional (default: `False`) |
+
+## Snapshot
+
+dbt snapshots allow a record to be made of changes to a mutable model over time. This in turn allows point-in-time queries on models, where analysts can “look back in time” at the previous state of a model. This functionality is supported by the ClickHouse connector and is configured using the following syntax:
+
+```jinja
+{{
+   config(
+     target_schema = "<schema-name>",
+     unique_key = "<column-name>",
+     strategy = "<strategy>",
+     updated_at = "<updated-at-column-name>",
+   )
+}}
+```
+
+#### Snapshot Configuration
+
+| Option | Description | Required? |
+|--------|-------------|-----------|
+| `target_schema` | The ClickHouse database name where the snapshot table will be created. | Required |
+| `unique_key` | A tuple of column names that uniquely identify rows. | Required. If not provided, altered rows will be added twice to the incremental table. |
+| `strategy` | Defines how dbt knows if a row has changed. More about dbt strategies [here](https://docs.getdbt.com/docs/building-a-dbt-project/snapshots#detecting-row-changes). | Required |
+| `updated_at` | If using the timestamp strategy, the timestamp column to compare. | Only if using the timestamp strategy |
+
+## Supported Table Engines
+
+| Type | Details |
+|------|---------|
+| MergeTree (default) | https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/mergetree/ |
+| HDFS | https://clickhouse.com/docs/en/engines/table-engines/integrations/hdfs |
+| MaterializedPostgreSQL | https://clickhouse.com/docs/en/engines/table-engines/integrations/materialized-postgresql |
+| S3 | https://clickhouse.com/docs/en/engines/table-engines/integrations/s3 |
+| EmbeddedRocksDB | https://clickhouse.com/docs/en/engines/table-engines/integrations/embedded-rocksdb |
+| Hive | https://clickhouse.com/docs/en/engines/table-engines/integrations/hive |
+
+If you encounter issues connecting to ClickHouse from dbt with one of the above engines, please report an issue [here](https://github.com/ClickHouse/dbt-clickhouse/issues).
+
+## Cross Database Macro Support
+
+dbt-clickhouse supports most of the cross-database macros now included in dbt-core, with the following exceptions:
+* The `listagg` SQL function (and therefore the corresponding dbt macro) is not supported by ClickHouse. You can achieve similar results with the ClickHouse `groupArray` function, but in some cases subqueries may be required to achieve the desired ordering.
+* The `split_part` SQL function is implemented in ClickHouse using the `splitByChar` function. This function requires using a constant string for the "split" delimiter, so the `delimiter` parameter used for this macro will be interpreted as a string, not a column name.
+* Similarly, the `replace` SQL function in ClickHouse requires constant strings for the `old_chars` and `new_chars` parameters, so those parameters will be interpreted as strings rather than column names when invoking this macro.
+
+## Setting `quote_columns`
+
+To prevent a warning, make sure to explicitly set a value for `quote_columns` in your `dbt_project.yml`.
See the [doc on quote_columns](https://docs.getdbt.com/reference/resource-configs/quote_columns) for more information.
+
+```yaml
+seeds:
+  +quote_columns: false  # or `true` if you have csv column headers with spaces
+```
+
diff --git a/website/docs/reference/resource-configs/column_types.md b/website/docs/reference/resource-configs/column_types.md
index 04645e143a0..274166a9aba 100644
--- a/website/docs/reference/resource-configs/column_types.md
+++ b/website/docs/reference/resource-configs/column_types.md
@@ -4,7 +4,7 @@ datatype: {column_name: datatype}
 ---
 ## Description
-Optionally specify the database type of columns in a [seed](docs/building-a-dbt-project/seeds.md), by providing a dictionary where the keys are the column names, and the values are a valid datatype (this varies across databases).
+Optionally specify the database type of columns in a [seed](/docs/build/seeds), by providing a dictionary where the keys are the column names, and the values are a valid datatype (this varies across databases).
 Without specifying this, dbt will infer the datatype based on the column values in your seed file.
@@ -20,14 +20,16 @@ seeds:
   +column_types:
     country_code: varchar(2)
     country_name: varchar(32)
-
 ```
-
+
 Or (as of v0.21):
+
+
+
 ```yml
 version: 2
@@ -77,3 +79,6 @@ seeds:
 ## Recommendation
 Use this configuration only when required, i.e. when the type inference is not working as expected. Otherwise you can omit this configuration.
+
+## Troubleshooting
+Note: The `column_types` configuration is case-sensitive, regardless of quoting configuration. If you specify a column as `Country_Name` in your seed, you should reference it as `Country_Name`, and not `country_name`.
diff --git a/website/docs/reference/resource-configs/database.md b/website/docs/reference/resource-configs/database.md
index 4cb31d6f899..41115864850 100644
--- a/website/docs/reference/resource-configs/database.md
+++ b/website/docs/reference/resource-configs/database.md
@@ -10,7 +10,7 @@ This is a work in progress document. While this configuration applies to multipl
 ## Definition
-Optionally specify a custom database for a [model](docs/building-a-dbt-project/building-models) or [seed](docs/building-a-dbt-project/seeds). (To specify a database for a [snapshot](snapshots), use the [`target_database` config](target_database)).
+Optionally specify a custom database for a [model](/docs/build/models) or [seed](/docs/build/seeds). (To specify a database for a [snapshot](snapshots), use the [`target_database` config](target_database)).
 When dbt creates a relation (table/view) in a database, it creates it as: `{{ database }}.{{ schema }}.{{ identifier }}`, e.g. `analytics.finance.payments`
 The standard behavior of dbt is:
 * If a custom database is _not_ specified, the database of the relation is the target database (`{{ target.database }}`).
 * If a custom database is specified, the database of the relation is the `{{ database }}` value.
-To learn more about changing the way that dbt generates a relation's `database`, read [Using Custom Databases](using-custom-databases)
+To learn more about changing the way that dbt generates a relation's `database`, read [Using Custom Databases](/docs/build/custom-databases)
diff --git a/website/docs/reference/resource-configs/docs.md b/website/docs/reference/resource-configs/docs.md
new file mode 100644
index 00000000000..c35890e0bf7
--- /dev/null
+++ b/website/docs/reference/resource-configs/docs.md
@@ -0,0 +1,308 @@
+---
+resource_types: models
+datatype: "{dictionary}"
+default_value: {show: true}
+---
+
+```yml
+version: 2
+
+models:
+  - name: model_name
+    docs:
+      show: true | false
+
+```
+
+This property is not implemented for sources.
+
+```yml
+version: 2
+
+seeds:
+  - name: seed_name
+    docs:
+      show: true | false
+
+```
+
+```yml
+version: 2
+
+snapshots:
+  - name: snapshot_name
+    docs:
+      show: true | false
+
+```
+
+```yml
+version: 2
+
+analyses:
+  - name: analysis_name
+    docs:
+      show: true | false
+```
+
+```yml
+version: 2
+
+macros:
+  - name: macro_name
+    docs:
+      show: true | false
+
+```
+
+## Definition
+The `docs` field can be used to provide documentation-specific configuration to models. The only currently supported `docs` attribute is `show`, which controls whether or not models are shown in the auto-generated documentation website.
+
+**Note:** hidden models will still appear in the dbt DAG visualization, but will be identified as "hidden".
+
+* `v0.16.0`: This property was added
+
+## Default
+The default value for `show` is `true`.
+
+## Examples
+### Mark a model as hidden
+
+```yml
+models:
+  - name: sessions__tmp
+    docs:
+      show: false
+```
+
+### Mark a subfolder of models as hidden
+
+**Note:** This can also hide dbt packages.
+
+```yml
+models:
+  # hiding models within the staging subfolder
+  tpch:
+    staging:
+      +materialized: view
+      +docs:
+        show: false
+
+  # hiding a dbt package
+  dbt_artifacts:
+    +docs:
+      show: false
+```
+
+## Custom node colors
+
+The `docs` attribute now supports `node_color` to customize the node color in the DAG within dbt docs. You can define node colors in the files below and apply overrides where needed.
+
+`node_color` hierarchy:
+
+`<sql-file>` overrides `schema.yml` overrides `dbt_project.yml`
+
+## Examples
+
+Add custom node colors to models within subdirectories based on hex codes or a plain color name.
+ +![Example](../../../../website/static/img/node_color_example.png) + +`marts/core/fct_orders.sql` with `node_color: red` overrides `dbt_project.yml` with `node_color: gold` + +`marts/core/schema.yml` with `node_color: #000000` overrides `dbt_project.yml` with `node_color: gold` + + +```yml +models: + tpch: + staging: + +materialized: view + +docs: + node_color: "#cd7f32" + + marts: + core: + materialized: table + +docs: + node_color: "gold" +``` + + + + + +```yml +models: + - name: dim_customers + description: Customer dimensions table + docs: + node_color: '#000000' +``` + + + + + +```sql +{{ + config( + materialized = 'view', + tags=['finance'], + docs={'node_color': 'red'} + ) +}} + +with orders as ( + + select * from {{ ref('stg_tpch_orders') }} + +), +order_item as ( + + select * from {{ ref('order_items') }} + +), +order_item_summary as ( + + select + order_key, + sum(gross_item_sales_amount) as gross_item_sales_amount, + sum(item_discount_amount) as item_discount_amount, + sum(item_tax_amount) as item_tax_amount, + sum(net_item_sales_amount) as net_item_sales_amount + from order_item + group by + 1 +), +final as ( + + select + + orders.order_key, + orders.order_date, + orders.customer_key, + orders.status_code, + orders.priority_code, + orders.clerk_name, + orders.ship_priority, + + 1 as order_count, + order_item_summary.gross_item_sales_amount, + order_item_summary.item_discount_amount, + order_item_summary.item_tax_amount, + order_item_summary.net_item_sales_amount + from + orders + inner join order_item_summary + on orders.order_key = order_item_summary.order_key +) +select + * +from + final + +order by + order_date + +``` + + + +If a `node_color` is incompatible with dbt docs, you will see a compile error, as in the example below. + +```shell +Invalid color name for docs.node_color: aweioohafio23f. It is neither a valid HTML color name nor a valid HEX code. 
+``` + + + +```yml +models: + tpch: + marts: + core: + materialized: table + +docs: + node_color: "aweioohafio23f" +``` + + + + diff --git a/website/docs/reference/resource-configs/enabled.md b/website/docs/reference/resource-configs/enabled.md index 7f63a3bd0be..5e9667bd04c 100644 --- a/website/docs/reference/resource-configs/enabled.md +++ b/website/docs/reference/resource-configs/enabled.md @@ -11,6 +11,9 @@ default_value: true { label: 'Seeds', value: 'seeds', }, { label: 'Snapshots', value: 'snapshots', }, { label: 'Tests', value: 'tests', }, + { label: 'Sources', value: 'sources', }, + { label: 'Metrics', value: 'metrics', }, + { label: 'Exposures', value: 'exposures', }, ] }> @@ -133,6 +136,121 @@ tests: + + + + +```yaml +sources: + [](resource-path): + [+](plus-prefix)enabled: true | false + +``` + + + + + + + +```yaml +version: 2 + +sources: + - name: [] + [config](resource-properties/config): + enabled: true | false + tables: + - name: [] + [config](resource-properties/config): + enabled: true | false + +``` + + + + + + + + + + + +Support for disabling metrics was added in dbt Core v1.3 + + + + + + + +```yaml +metrics: + [](resource-path): + [+](plus-prefix)enabled: true | false + +``` + + + + + +```yaml +version: 2 + +metrics: + - name: [] + [config](resource-properties/config): + enabled: true | false + +``` + + + + + + + + + + + +Support for disabling exposures was added in dbt Core v1.3 + + + + + + + +```yaml +exposures: + [](resource-path): + [+](plus-prefix)enabled: true | false + +``` + + + + + +```yaml +version: 2 + +exposures: + - name: [] + [config](resource-properties/config): + enabled: true | false + +``` + + + + + + + ## Definition @@ -144,11 +262,11 @@ When a resource is disabled, dbt will not consider it as part of your project. N If you instead want to exclude a model from a particular run, consider using the `--exclude` parameter as part of the [model selection syntax](node-selection/syntax) -If you are disabling models because they are no longer being used, but you want to version control their SQL, consider making them an [analysis](docs/building-a-dbt-project/analyses.md) instead. +If you are disabling models because they are no longer being used, but you want to version control their SQL, consider making them an [analysis](/docs/build/analyses) instead. ## Examples ### Disable a model in a package in order to use your own version of the model. -This could be useful if you want to change the logic of a model in a package. For example, if you need to change the logic in the `segment_web_page_views` from the `segment` package ([original model](https://github.com/dbt-labs/segment/blob/master/models/base/segment_web_page_views.sql)): +This could be useful if you want to change the logic of a model in a package. For example, if you need to change the logic in the `segment_web_page_views` from the `segment` package ([original model](https://github.com/dbt-labs/segment/blob/main/models/base/segment_web_page_views.sql)): 1. Add a model named `segment_web_page_views` the same name to your own project. 2. 
To avoid a compilation error due to duplicate models, disable the segment package's version of the model like so: diff --git a/website/docs/reference/resource-configs/firebolt-configs.md b/website/docs/reference/resource-configs/firebolt-configs.md index 10d16089e0c..6fba429a2bd 100644 --- a/website/docs/reference/resource-configs/firebolt-configs.md +++ b/website/docs/reference/resource-configs/firebolt-configs.md @@ -27,7 +27,7 @@ A dbt model can be created as a Firebolt fact and configured { label: 'Config block', value: 'config', }, ] }> - + @@ -66,7 +66,7 @@ models: - + @@ -96,14 +96,14 @@ models: | Configuration | Description | |-------------------|-------------------------------------------------------------------------------------------| | `materialized` | How the model will be materialized into Firebolt. Must be `table` to create a fact table. | -| `table_type` | Whether the materialized table will be a [fact or dimension](https://docs.firebolt.io/concepts/working-with-tables#fact-and-dimension-tables) table. | +| `table_type` | Whether the materialized table will be a [fact or dimension](https://docs.firebolt.io/working-with-tables.html#fact-and-dimension-tables) table. | | `primary_index` | Sets the primary index for the fact table using the inputted list of column names from the model. Required for fact tables. | | `indexes` | A list of aggregating indexes to create on the fact table. | | `type` | Specifies whether the index is an aggregating index or join index. Join indexes only apply to dimension tables, so for fact tables set to `aggregating`. | | `key_column` | Sets the grouping of the aggregating index using the inputted list of column names from the model. | | `aggregation` | Sets the aggregations on the aggregating index using the inputted list of SQL agg expressions. | - + #### Example of a Fact Table With an Aggregating Index ``` @@ -121,7 +121,7 @@ models: ) }} ``` - + ## Model Configuration for Dimension Tables A dbt model can be materialized as a Firebolt dimension table and configured using the following syntax: @@ -195,19 +195,19 @@ models: - + #### Dimension Table Configurations | Configuration | Description | |--------------------|-------------------------------------------------------------------------------------------| | `materialized` | How the model will be materialized into Firebolt. Must be `table` to create a dimension table. | -| `table_type` | Whether the materialized table will be a [fact or dimension](https://docs.firebolt.io/concepts/working-with-tables#fact-and-dimension-tables) table. | +| `table_type` | Whether the materialized table will be a [fact or dimension](https://docs.firebolt.io/working-with-tables.html#fact-and-dimension-tables) table. | | `indexes` | A list of join indexes to create on the dimension table. | | `type` | Specifies whether the index is an aggregating index or join index. Aggregating indexes only apply to fact tables, so for dimension tables set to `join`. | | `join_column` | Sets the join key of the join index using the inputted column name from the model. | | `dimension_column` | Sets the columns to be loaded into memory on the join index using the inputted list of column names from the mode. | - + #### Example of a Dimension Table With a Join Index ``` @@ -224,25 +224,25 @@ models: ) }} ``` - + ## How Aggregating Indexes and Join Indexes Are Named In dbt-firebolt, you do not provide names for aggregating indexes and join indexes; they are named programmatically. 
dbt will generate index names using the following convention:

```
<table-name>__<column-names>__<index-type>_<unix-timestamp>
```

For example, a join index could be named `my_users__id__join_1633504263` and an aggregating index could be named `my_orders__order_date__aggregating_1633504263`.

## Managing Ingestion via External Tables

`dbt-firebolt` supports dbt's [external tables feature](https://docs.getdbt.com/reference/resource-properties/external), which allows dbt to manage the table ingestion process from S3 into Firebolt. This is an optional feature but can be highly convenient depending on your use case.

More information on using external tables, including properly configuring IAM, can be found in the Firebolt [documentation](https://docs.firebolt.io/sql-reference/commands/ddl-commands#create-external-table).

#### Installation of External Tables Package

To install and use `dbt-external-tables` with Firebolt, you must:
@@ -265,14 +265,14 @@ To install and use `dbt-external-tables` with Firebolt, you must:
 3. Pull in the `packages.yml` dependencies by calling `dbt deps`.

#### Using External Tables

To use external tables, you must define a table as `external` in your `dbt_project.yml` file. Every external table must contain the fields `url`, `type`, and `object_pattern`. Note that the Firebolt external table specification requires fewer fields than what is specified in the dbt documentation.

In addition to specifying the columns, an external table may specify partitions. Partitions are not columns and they cannot have the same name as columns. To avoid YAML parsing errors, remember to encase string literals (such as the `url` and `object_pattern` values) in single quotation marks.

#### dbt_project.yml Syntax For an External Table

```yml
@@ -312,11 +312,46 @@ $ dbt run-operation stage_external_sources
 # iterate through all source nodes, create or replace (no refresh command is required as data is fetched live from remote)
 $ dbt run-operation stage_external_sources --vars "ext_full_refresh: true"
 ```
-
-## Seeds behavior
-
+
+## Incremental models
+
+The [`incremental_strategy` configuration](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/configuring-incremental-models#about-incremental_strategy) controls how dbt builds incremental models. Firebolt currently supports the `append` configuration. You can specify `incremental_strategy` in `dbt_project.yml` or within a model file's `config()` block. The `append` configuration is the default. Specifying this configuration is optional.
+
+The `append` strategy performs an `INSERT INTO` statement with all the new data based on the model definition. This strategy doesn't update or delete existing rows, so if you do not filter the data to the most recent records only, it is likely that duplicate records will be inserted.
+
+Example source code:
+
+```
+{{ config(
+    materialized = 'incremental',
+    incremental_strategy = 'append'
+) }}
+
+/* All rows returned by this query will be appended to the existing model */
+
+select * from {{ ref('raw_orders') }}
+{% if is_incremental() %}
+  where order_date > (select max(order_date) from {{ this }})
+{% endif %}
+```
+
+Example run code:
+
+```sql
+CREATE DIMENSION TABLE IF NOT EXISTS orders__dbt_tmp AS
+SELECT * FROM raw_orders
+WHERE order_date > (SELECT MAX(order_date) FROM orders);
+
+INSERT INTO orders ([columns])
+SELECT [columns]
+FROM orders__dbt_tmp;
+```
+
+## Seeds behavior
+
 When running the `dbt seed` command, we perform a `DROP CASCADE` operation instead of `TRUNCATE`.
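As a rough sketch of that difference (the seed name `country_codes` is hypothetical, and the exact SQL the adapter emits may differ):

```sql
-- instead of truncating the existing seed table:
-- TRUNCATE TABLE country_codes;
-- dbt-firebolt drops it, along with any dependent objects, before re-creating it:
DROP TABLE IF EXISTS country_codes CASCADE;
```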
- + ## Practice - + You can look at our modified version of the jaffle_shop, [jaffle_shop_firebolt](https://github.com/firebolt-db/jaffle_shop_firebolt), to see how indexes, as well as external tables, can be set or clone and execute the commands listed in the README.md diff --git a/website/docs/reference/resource-configs/full_refresh.md b/website/docs/reference/resource-configs/full_refresh.md index d78766e9e6d..abdcc358f3f 100644 --- a/website/docs/reference/resource-configs/full_refresh.md +++ b/website/docs/reference/resource-configs/full_refresh.md @@ -67,18 +67,24 @@ Optionally set a resource to always or never full-refresh. `full_refresh` config will take precedence over the presence or absence of the `--full-refresh` flag. - If the `full_refresh` config is `none` or omitted, the resource will use the value of the `--full-refresh` flag. -This logic is encoded in the [`should_full_refresh()`](https://github.com/dbt-labs/dbt-core/blob/HEAD/core/dbt/include/global_project/macros/materializations/helpers.sql#L68) macro. + + +**Note:** The `--full-refresh` flag also supports a short name, `-f`. + + + +This logic is encoded in the [`should_full_refresh()`](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/include/global_project/macros/materializations/configs.sql#L6) macro. ## Usage ### Incremental models -* [How do I rebuild an incremental model?](configuring-incremental-models#how-do-i-rebuild-an-incremental-model) -* [What if the columns of my incremental model change?](configuring-incremental-models#what-if-the-columns-of-my-incremental-model-change) +* [How do I rebuild an incremental model?](/docs/build/incremental-models#how-do-i-rebuild-an-incremental-model) +* [What if the columns of my incremental model change?](/docs/build/incremental-models#what-if-the-columns-of-my-incremental-model-change) ### Seeds - + ## Recommendation Set `full_refresh: false` for models of especially large datasets, which you would _never_ want dbt to fully drop and recreate. diff --git a/website/docs/reference/resource-configs/glue-configs.md b/website/docs/reference/resource-configs/glue-configs.md index 62ec39bcbf7..6834c08ff3f 100644 --- a/website/docs/reference/resource-configs/glue-configs.md +++ b/website/docs/reference/resource-configs/glue-configs.md @@ -18,13 +18,13 @@ When materializing a model as `table`, you may include several optional configs ## Incremental models -dbt seeks to offer useful, intuitive modeling abstractions by means of its built-in configurations and materializations. +dbt seeks to offer useful, intuitive modeling abstractions by means of its built-in configurations and materializations. -For that reason, the dbt-glue plugin leans heavily on the [`incremental_strategy` config](configuring-incremental-models#about-incremental_strategy). This config tells the incremental materialization how to build models in runs beyond their first. It can be set to one of three values: +For that reason, the dbt-glue plugin leans heavily on the [`incremental_strategy` config](/docs/build/incremental-models#about-incremental_strategy). This config tells the incremental materialization how to build models in runs beyond their first. It can be set to one of three values: - **`append`** (default): Insert new records without updating or overwriting any existing data. - **`insert_overwrite`**: If `partition_by` is specified, overwrite partitions in the table with new data. If no `partition_by` is specified, overwrite the entire table with new data. 
- **`merge`** (Apache Hudi only): Match records based on a `unique_key`; update old records, insert new ones. (If no `unique_key` is specified, all new data is inserted, similar to `append`.) - + Each of these strategies has its pros and cons, which we'll discuss below. As with any model config, `incremental_strategy` may be specified in `dbt_project.yml` or within a model file's `config()` block. **Notes:** @@ -88,7 +88,7 @@ drop view spark_incremental__dbt_tmp ### The `insert_overwrite` strategy -This strategy is most effective when specified alongside a `partition_by` clause in your model config. dbt will run an [atomic `insert overwrite` statement](https://spark.apache.org/docs/latest/sql-ref-syntax-dml-insert-overwrite-table.html) that dynamically replaces all partitions included in your query. Be sure to re-select _all_ of the relevant data for a partition when using this incremental strategy. +This strategy is most effective when specified alongside a `partition_by` clause in your model config. dbt will run an [atomic `insert overwrite` statement](https://spark.apache.org/docs/3.1.2/sql-ref-syntax-dml-insert-overwrite-table.html) that dynamically replaces all partitions included in your query. Be sure to re-select _all_ of the relevant data for a partition when using this incremental strategy. If no `partition_by` is specified, then the `insert_overwrite` strategy will atomically replace all contents of the table, overriding all existing data with only the new records. The column schema of the table remains the same, however. This can be desirable in some limited circumstances, since it minimizes downtime while the table contents are overwritten. The operation is comparable to running `truncate` + `insert` on other databases. For atomic replacement of Delta-formatted tables, use the `table` materialization (which runs `create or replace`) instead. diff --git a/website/docs/reference/resource-configs/grants.md b/website/docs/reference/resource-configs/grants.md new file mode 100644 index 00000000000..c5c3b9e3ece --- /dev/null +++ b/website/docs/reference/resource-configs/grants.md @@ -0,0 +1,261 @@ +--- +resource_types: [models,seeds,snapshots] +datatype: "{}" +default_value: {} +id: "grants" +--- + +You can manage access to the datasets you're producing with dbt by using grants. To implement these permissions, define grants as resource configs on each model, seed, or snapshot. Define the default grants that apply to the entire project in your `dbt_project.yml`, and define model-specific grants within each model's SQL or YAML file. + +The grant resource configs enable you to apply permissions at build time to a specific set of recipients and model, seed, or snapshot. When your model, seed, or snapshot finishes building, dbt ensures that the grants on its view or table match exactly the grants you have configured. + +dbt aims to use the most efficient approach when updating grants, which varies based on the adapter you're using, and whether dbt is replacing or updating an object that already exists. You can always check the debug logs for the full set of grant and revoke statements that dbt runs. + +dbt encourages you to use grants as resource configs whenever possible in Core v1.2 and higher. In versions prior to Core v1.2, you were limited to using hooks for grants. Occasionally, you still might need to write grants statements manually and run them using hooks. 
For example, hooks may be appropriate if you want to: + +* Apply grants in a more complex or custom manner, beyond what the built-in grants capability can provide. +* Apply grants on other database objects besides views and tables. +* Take advantage of more-advanced permission capabilities offered by your data platform, for which dbt does not (yet!) offer out-of-the-box support using resource configuration. +* Create more granular row- and column-level access, use masking policies, or apply future grants. + +For more information on hooks, see [Hooks & operations](/docs/build/hooks-operations). + +## Definition + +You can use the `grants` field to set permissions or grants for a resource. When you run a model, seed or seed, or snapshot a snapshot, dbt will run `grant` and/or `revoke` statements to ensure that the permissions on the database object match the `grants` you have configured on the resource. + +Like all configurations, `grants` will be included in dbt project metadata, including [the manifest artifact](/reference/artifacts/manifest-json). + +### Common syntax + +Grants have two key components: + +* **Privilege:** A right to perform a specific action or set of actions on an object in the database, such as selecting data from a table. +* **Grantees:** One or more recipients of granted privileges. Some platforms also call these "principals." For example, a grantee could be a user, a group of users, a role held by one or more users (Snowflake), or a service account (BigQuery/GCP). + +## Configuring grants + +You can configure `grants` in `dbt_project.yml` to apply grants to many resources at once—all models in your project, a package, or a subfolder—and you can also configure `grants` one-by-one for specific resources, in yaml `config:` blocks or right within their `.sql` files. + + + + + + + +```yml +models: + - name: specific_model + config: + grants: + select: ['reporter', 'bi'] +``` + + + +The `grants` config can also be defined: + +- under the `models` config block in `dbt_project.yml` +- in a `config()` Jinja macro within a model's SQL file + +See [configs and properties](configs-and-properties) for details. + + + + + + + +```yml +seeds: + - name: seed_name + config: + grants: + select: ['reporter', 'bi'] +``` + + + +The `grants` config can also be defined under the `seeds` config block in `dbt_project.yml`. See [configs and properties](configs-and-properties) for details. + + + + + + + +```yml +snapshots: + - name: snapshot_name + config: + grants: + select: ['reporter', 'bi'] +``` + + + +The `grants` config can also be defined: + +- under the `snapshots` config block in `dbt_project.yml` +- in a `config()` Jinja macro within a snapshot's SQL block + +See [configs and properties](configs-and-properties) for details. + + + + +### Grant config inheritance + +When you set `grants` for the same model in multiple places, such as in `dbt_project.yml` and in a more-specific `.sql` or `.yml` file, dbt's default behavior replaces the less-specific set of grantees with the more-specific set of grantees. This "merge and clobber" behavior updates each privilege when dbt parses your project. + +For example: + + + +```yml +models: + +grants: + select: ['user_a', 'user_b'] +``` + + + + + +```sql +{{ config(grants = {'select': ['user_c']}) }} +``` + + + +As a result of this configuration, `specific_model` will be configured to grant the `select` privilege to `user_c` _only_. 
After you run `specific_model`, that is the only granted privilege you would see in the database, and the only `grant` statement you would find in dbt's logs. + +Let's say we wanted to _add_ `user_c` to the existing list of grantees receiving the `select` privilege on `specific_model`, rather than _replacing_ that list entirely. To accomplish that, we can use the `+` ("addition") symbol, prefixing the name of the privilege: + + + +```sql +{{ config(grants = {'+select': ['user_c']}) }} +``` + + + +Now, the model will grant select to `user_a`, `user_b`, AND `user_c`! + +**Notes:** +- This will only take effect for privileges which include the `+` prefix. Each privilege controls that behavior separately. If we were granting other privileges, in addition to `select`, and those privilege names lacked the `+` prefix, they would continue to "clobber" rather than "add" new grantees. +- This use of `+`, controlling clobber vs. add merge behavior, is distinct from the use of `+` in `dbt_project.yml` (shown in the example above) for defining configs with dictionary values. For more information, see [the plus prefix](https://docs.getdbt.com/reference/resource-configs/plus-prefix). +- `grants` is the first config to support a `+` prefix for controlling config merge behavior. Currently, it's the only one. If it proves useful, we may extend this capability to new and existing configs in the future. + +## General examples + +You can grant each permission to a single grantee, or a set of multiple grantees. In this example, we're granting `select` on this model to just `bi_user`, so that it can be queried in our Business Intelligence (BI) tool. + + + +```sql +{{ config(materialized = 'table', grants = { + 'select': 'bi_user' +}) }} +``` + + + +When dbt runs this model for the first time, it will create the table, and then run code like: +```sql +grant select on schema_name.table_model to bi_user; +``` + +In this case, we're creating an incremental model, and granting the `select` privilege to two recipients: `bi_user` and `reporter`. + + + +```sql +{{ config(materialized = 'incremental', grants = { + 'select': ['bi_user', 'reporter'] +}) }} +``` + + + +When dbt runs this model for the first time, it will create the table, and then run code like: +```sql +grant select on schema_name.incremental_model to bi_user, reporter; +``` + +In subsequent runs, dbt will use database-specific SQL to show the grants already on `incremental_model`, and then determine if any `revoke` or `grant` statements are needed. + + +## Database-specific requirements and notes + +While we try to standardize the terms we use to describe different features, you will always find nuances in different databases. This section outlines some of those database-specific requirements and notes. + +In our examples above and below, you will find us referring to a privilege named `select`, and a grantee named `another_user`. Many databases use these or similar terms. Be aware that your database may require different syntax for privileges and grantees; you must configure `grants` in dbt with the appropriate names for both. + + + +
    + +On BigQuery, "privileges" are called "roles," and they take the form `roles/service.roleName`. For instance, instead of granting `select` on a model, you would grant `roles/bigquery.dataViewer`. + +Grantees can be users, groups, service accounts, domains—and each needs to be clearly demarcated as such with a prefix. For instance, to grant access on a model to `someone@yourcompany.com`, you need to specify them as `user:someone@yourcompany.com`. + +We encourage you to read Google's documentation for more context: +- [Understanding GCP roles](https://cloud.google.com/iam/docs/understanding-roles) +- [How to format grantees](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-control-language#user_list) + + + +### BigQuery examples + +Granting permission using SQL and BigQuery: + +```sql +{{ config(grants = {'roles/bigquery.dataViewer': ['user:someone@yourcompany.com']}) }} +``` + +Granting permission in a model schema using BigQuery: + + + +```yml +models: + - name: specific_model + config: + grants: + roles/bigquery.dataViewer: ['user:someone@yourcompany.com'] +``` + + + +
    + +
    + +- OSS Apache Spark / Delta Lake do not support `grants`. +- Databricks automatically enables `grants` on SQL endpoints. For interactive clusters, admins should enable grant functionality using these two setup steps in the Databricks documentation: + - [Enable table access control for your workspace](https://docs.databricks.com/administration-guide/access-control/table-acl.html) + - [Enable table access control for a cluster](https://docs.databricks.com/security/access-control/table-acls/table-acl.html) + +
    + +
    + +* No special requirements at this time. + +
    + +
    + +* dbt accounts for the [`copy_grants` configuration](/reference/resource-configs/snowflake-configs#copying-grants) when calculating which grants need to be added or removed. + +
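As an illustrative sketch (the role name `reporter` is hypothetical), the configuration itself looks the same as the general examples above; on Snowflake, each grantee is interpreted as a role name:

```sql
{{ config(materialized = 'table', grants = {
    'select': ['reporter']
}) }}
```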
    + +
diff --git a/website/docs/reference/resource-configs/greenplum-configs.md b/website/docs/reference/resource-configs/greenplum-configs.md
new file mode 100644
index 00000000000..b9115553542
--- /dev/null
+++ b/website/docs/reference/resource-configs/greenplum-configs.md
@@ -0,0 +1,133 @@
+---
+title: "Greenplum configurations"
+id: "greenplum-configs"
+---
+
+## Performance Optimizations
+
+Tables in Greenplum have powerful optimization configurations to improve query performance:
+
+ - distribution
+ - column orientation
+ - compression
+ - `appendonly` toggle
+ - partitions
+
+Supplying these values as model-level configurations applies the corresponding settings in the generated `CREATE TABLE` (except partitions). Note that these settings will have no effect for models set to `view`.
+
+### Distribution
+
+In Greenplum, you can choose a [distribution key](https://gpdb.docs.pivotal.io/6-4/admin_guide/distribution.html) that determines how rows are distributed across segments. Joins on the distribution key become more performant after specifying distribution.
+
+By default, dbt-greenplum distributes data `RANDOMLY`. To implement a distribution key, you need to specify the `distributed_by` parameter in the model's config:
+
+```sql
+{{
+    config(
+        ...
+        distributed_by='<field_name>'
+        ...
+    )
+}}
+
+select ...
+```
+
+You can also choose the `DISTRIBUTED REPLICATED` option:
+
+```sql
+{{
+    config(
+        ...
+        distributed_replicated=true
+        ...
+    )
+}}
+
+select ...
+```
+
+### Column orientation
+
+Greenplum supports two types of [orientation](https://gpdb.docs.pivotal.io/6-6/admin_guide/ddl/ddl-storage.html#topic39), row and column:
+
+```sql
+{{
+    config(
+        ...
+        orientation='column'
+        ...
+    )
+}}
+
+select ...
+```
+
+### Compression
+
+Compression allows reducing read-write time. Greenplum suggests several [algorithms](https://gpdb.docs.pivotal.io/6-6/admin_guide/ddl/ddl-storage.html#topic40) to compress append-optimized tables:
+ - RLE_TYPE (only for column-oriented tables)
+ - ZLIB
+ - ZSTD
+ - QUICKLZ
+
+```sql
+{{
+    config(
+        ...
+        appendonly='true',
+        compresstype='ZLIB',
+        compresslevel=3,
+        blocksize=32768
+        ...
+    )
+}}
+
+select ...
+```
+
+As you can see, you can also specify `compresslevel` and `blocksize`.
+
+### Partition
+
+Greenplum does not support partitions with the `create table as` [construction](https://gpdb.docs.pivotal.io/6-9/ref_guide/sql_commands/CREATE_TABLE_AS.html), so you need to build the model in two steps:
+
+1. create the table schema
+2. insert the data
+
+To implement partitions in your dbt model, you need to specify the following config parameters:
+ - `fields_string` - definition of column names, types, and constraints
+ - `raw_partition` - partition specification
+
+```sql
+{% set fields_string %}
+    some_field int4 null,
+    date_field timestamp NULL
+{% endset %}
+
+{% set raw_partition %}
+  PARTITION BY RANGE (date_field)
+  (
+      START ('2021-01-01'::timestamp) INCLUSIVE
+      END ('2023-01-01'::timestamp) EXCLUSIVE
+      EVERY (INTERVAL '1 day'),
+      DEFAULT PARTITION default_part
+  );
+{% endset %}
+
+{{
+    config(
+        ...
+        fields_string=fields_string,
+        raw_partition=raw_partition,
+        ...
+    )
+}}
+
+select *
+```
\ No newline at end of file
diff --git a/website/docs/reference/resource-configs/hive-configs.md b/website/docs/reference/resource-configs/hive-configs.md
new file mode 100644
index 00000000000..1fb498ec8ce
--- /dev/null
+++ b/website/docs/reference/resource-configs/hive-configs.md
@@ -0,0 +1,52 @@
+---
+title: "Apache Hive configurations"
+id: "hive-configs"
+---
+
+## Configuring tables
+
+When materializing a model as `table`, you may include several optional configs that are specific to the dbt-hive plugin, in addition to the standard [model configs](model-configs).
+
+| Option | Description | Required? | Example |
+|--------|-------------|-----------|---------|
+| partition_by | partition by a column; typically a directory per partition is created | No | partition_by=['name'] |
+| clustered_by | second-level division of a partitioned column | No | clustered_by=['age'] |
+| file_format | underlying storage format of the table; see https://cwiki.apache.org/confluence/display/Hive/FileFormats for supported formats | No | file_format='PARQUET' |
+| location | storage location, typically an HDFS path | No | LOCATION='/user/etl/destination' |
+| comment | comment for the table | No | comment='this is the cleanest model' |
+
+## Incremental models
+
+Supported modes for incremental model:
+ - **`append`** (default): Insert new records without updating or overwriting any existing data.
+ - **`insert_overwrite`**: For new records, insert data. When used along with a partition clause, update data for changed records and insert data for new records.
+
+## Example: Using the partition_by config option
+
+```sql
+{{
+    config(
+        materialized='table',
+        unique_key='id',
+        partition_by=['city'],
+    )
+}}
+
+with source_data as (
+    select 1 as id, "Name 1" as name, "City 1" as city
+    union all
+    select 2 as id, "Name 2" as name, "City 2" as city
+    union all
+    select 3 as id, "Name 3" as name, "City 2" as city
+    union all
+    select 4 as id, "Name 4" as name, "City 1" as city
+)
+
+select * from source_data
+```
+
+In the above example, a sample table is created with partition_by and other config options. One thing to note when using the partition_by option is that the select query should always have the column used in partition_by as the last one, as can be seen for the `city` column above. If the partition_by column is not the last column in the select statement, Hive will flag an error when trying to create the model.
diff --git a/website/docs/reference/resource-configs/impala-configs.md b/website/docs/reference/resource-configs/impala-configs.md
new file mode 100644
index 00000000000..5564df115e7
--- /dev/null
+++ b/website/docs/reference/resource-configs/impala-configs.md
@@ -0,0 +1,66 @@
+---
+title: "Apache Impala configurations"
+id: "impala-configs"
+---
+
+## Configuring tables
+
+When materializing a model as `table`, you may include several optional configs that are specific to the dbt-impala plugin, in addition to the standard [model configs](model-configs).
+
+| Option | Description | Required? | Example |
diff --git a/website/docs/reference/resource-configs/impala-configs.md b/website/docs/reference/resource-configs/impala-configs.md
new file mode 100644
index 00000000000..5564df115e7
--- /dev/null
+++ b/website/docs/reference/resource-configs/impala-configs.md
@@ -0,0 +1,66 @@
+---
+title: "Apache Impala configurations"
+id: "impala-configs"
+---
+
+## Configuring tables
+
+When materializing a model as `table`, you may include several optional configs that are specific to the dbt-impala plugin, in addition to the standard [model configs](model-configs).
+
+| Option | Description | Required? | Example |
+|---------|----------------------------------------------------|-------------------------|--------------------------|
+| partition_by | partition by a column, typically a directory per partition is created | No | partition_by=['name'] |
+| sort_by | sort by a column | No | sort_by=['age'] |
+| row_format | format to be used when storing individual rows | No | row_format='delimited' |
+| stored_as | underlying storage format of the table | No | stored_as='PARQUET' |
+| location | storage location, typically an hdfs path | No | LOCATION='/user/etl/destination' |
+| comment | comment for the table | No | comment='this is the cleanest model' |
+| serde_properties | SerDes ([de-]serialization) properties of the table | No | serde_properties="('quoteChar'='\'', 'escapeChar'='\\')" |
+| tbl_properties | any metadata can be stored as key/value pairs with the table | No | tbl_properties="('dbt_test'='1')" |
+| is_cached | true or false - whether this table is cached | No | is_cached=false (default) |
+| cache_pool | cache pool name to use if is_cached is set to true | No | |
+| replication_factor | cache replication factor to use if is_cached is set to true | No | |
+| external | whether this is an external table - true / false | No | external=true |
+
+For Cloudera-specific options for the above parameters, see the [CREATE TABLE documentation](https://docs.cloudera.com/documentation/enterprise/6/6.3/topics/impala_create_table.html).
+
+## Incremental models
+
+Supported modes for incremental models:
+ - **`append`** (default): Insert new records without updating or overwriting any existing data.
+ - **`insert_overwrite`**: For new records, insert data. When used along with a partition clause, update data for changed records and insert data for new records.
+
+
+Unsupported modes:
+ - **`unique_key`**: This is not a supported option for incremental models in dbt-impala.
+ - **`merge`**: Merge is not supported by the underlying warehouse, and hence not supported by dbt-impala.
+
+## Example: Using partition_by config option
+
+
+
+```sql
+{{
+    config(
+        materialized='table',
+        unique_key='id',
+        partition_by=['city'],
+    )
+}}
+
+with source_data as (
+    select 1 as id, "Name 1" as name, "City 1" as city
+    union all
+    select 2 as id, "Name 2" as name, "City 2" as city
+    union all
+    select 3 as id, "Name 3" as name, "City 2" as city
+    union all
+    select 4 as id, "Name 4" as name, "City 1" as city
+)
+
+select * from source_data
+```
+
+
+
+In the above example, a sample table is created with partition_by and other config options. One thing to note when using the partition_by option is that the select query should always list the column used in partition_by last, as can be seen for the ```city``` column above. If the partition_by column is not the last column in the select statement, Impala will flag an error when trying to create the model.
diff --git a/website/docs/reference/resource-configs/materialize-configs.md b/website/docs/reference/resource-configs/materialize-configs.md
index 0d4b4705e84..a565156c459 100644
--- a/website/docs/reference/resource-configs/materialize-configs.md
+++ b/website/docs/reference/resource-configs/materialize-configs.md
@@ -5,21 +5,65 @@ id: "materialize-configs"
 
 ## Performance optimizations
 
+### Clusters
+
+
+
+- **v1.2.0:** Enable the configuration of [clusters](https://github.com/MaterializeInc/materialize/blob/main/misc/dbt-materialize/CHANGELOG.md#120---2022-08-31).
+ + + +The default [cluster](https://materialize.com/docs/overview/key-concepts/#clusters) that is used to maintain materialized views or indexes can be configured in your [profile](/reference/profiles.yml) using the `cluster` connection parameter. To override the cluster that is used for specific models (or groups of models), use the `cluster` configuration parameter. + + + +```sql +{{ config(materialized='materializedview', cluster='not_default') }} + +select ... +``` + + + + + +```yaml +models: + project_name: + +materialized: materializedview + +cluster: not_default +``` + + + + + +### Incremental models: Materialized Views + +Materialize, at its core, is a real-time database that delivers incremental view updates without ever compromising on latency or correctness. Use [materialized views](https://materialize.com/docs/overview/key-concepts/#materialized-views) to compute and incrementally update the results of your query. + ### Indexes -:::info Advanced feature - Manually creating indexes in Materialize is an advanced feature that most users **do not** need. See the [Materialize documentation](https://materialize.com/docs/sql/create-index/) for more details. -::: + + +- **v1.2.0:** Enable additional configuration for [indexes](https://github.com/MaterializeInc/materialize/blob/main/misc/dbt-materialize/CHANGELOG.md#120---2022-08-31). + + + +Like in any standard relational database, you can use [indexes](https://materialize.com/docs/overview/key-concepts/#indexes) to optimize query performance in Materialize. Improvements can be significant, reducing response times down to single-digit milliseconds. Materialized views (`materializedview`), views (`view`) and sources (`source`) may have a list of `indexes` defined. Each [Materialize index](https://materialize.com/docs/sql/create-index/) can have the following components: -- `columns` (list, required): one or more columns on which the index is defined -- `type` (string, optional): a supported index type. The only supported type is [`arrangement`](https://materialize.com/docs/overview/arrangements/). +- `columns` (list, required): one or more columns on which the index is defined. To create an index that uses _all_ columns, use the `default` component instead. +- `name` (string, optional): the name for the index. If unspecified, Materialize will use the materialization name and column names provided. +- `cluster` (string, optional): the cluster to use to create the index. If unspecified, indexes will be created in the cluster used to create the materialization. +- `default` (bool, optional): Default: `False`. If set to `True`, creates a default index that uses all columns. - + ```sql {{ config(materialized='view', + indexes=[{'columns': ['col_a'], 'cluster': 'cluster_a'}]) }} indexes=[{'columns': ['symbol']}]) }} select ... @@ -27,25 +71,34 @@ select ... -If one or more indexes are configured on a resource, dbt will run `create index` statement(s) as part of that resource's , within the same transaction as its main `create` statement. For the index's name, dbt uses a hash of its properties and the current timestamp, in order to guarantee uniqueness and avoid namespace conflict with other indexes. + ```sql -create index if not exists -"3695050e025a7173586579da5b27d275" -on "my_target_database"."my_target_schema"."view_model" -(symbol); +{{ config(materialized='view', + indexes=[{'default': True}]) }} + +select ... 
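+
+-- with `default: True`, Materialize creates a default index that uses all columns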
 ```
-You can also configure indexes for a number of resources at once:
+
+
+### Tests
+
+
+
+- **v1.1.1:** Provide support for storing the results of a test query in a materialized view, using the `store_failures` config.
+
+
+
+If you set the optional `--store-failures` flag or [`store_failures` config](resource-configs/store_failures), dbt will create a materialized view using the test query. This view is a continuously updating representation of failures.
 
 ```yaml
-models:
+tests:
   project_name:
-  subdirectory:
-    +indexes:
-      - columns: ['symbol']
+    +store_failures: true
+    +schema: test
 ```
 
-
+
\ No newline at end of file
diff --git a/website/docs/reference/resource-configs/meta.md b/website/docs/reference/resource-configs/meta.md
index 73823f81f3f..948982925b0 100644
--- a/website/docs/reference/resource-configs/meta.md
+++ b/website/docs/reference/resource-configs/meta.md
@@ -242,7 +242,7 @@ seeds:
 
 ### Override one meta attribute for a single model
 
-
+
 
 ```sql
 {{ config(meta = {
diff --git a/website/docs/reference/resource-configs/mindsdb-configs.md b/website/docs/reference/resource-configs/mindsdb-configs.md
new file mode 100644
index 00000000000..c100887138c
--- /dev/null
+++ b/website/docs/reference/resource-configs/mindsdb-configs.md
@@ -0,0 +1,61 @@
+---
+title: "MindsDB configurations"
+id: "mindsdb-configs"
+---
+
+## Authentication
+
+To successfully connect dbt to MindsDB, you will need to provide the following configuration for the MindsDB instance.
+
+| Key | Required| Description | Self-hosted | MindsDB Cloud |
+|---------|-------------------------|---------------------------|-------------------------|--------------------------|
+| type | ✔️ | The specific adapter to use | `mindsdb` | `mindsdb` |
+| host | ✔️ | The MindsDB hostname to connect to | Defaults to `127.0.0.1` | Defaults to `cloud.mindsdb.com`|
+| port | ✔️ | The port to use | Defaults to `47335` | Defaults to `3306`|
+| schema | ✔️ | Specify the schema (database) to build models into | The MindsDB [integration name](https://docs.mindsdb.com/sql/create/databases/)|The MindsDB [integration name](https://docs.mindsdb.com/sql/create/databases/)|
+| username | ✔️ | The username to use to connect to the server | Defaults to `mindsdb` | Your MindsDB Cloud username|
+| password | ✔️ | The password to use for authenticating to the server | No password by default| Your MindsDB Cloud password|
+
+## Usage
+
+Create a dbt project with `dbt init <project_name>`, choose `mindsdb` as the database, and set up the connection. Verify that your connection works with `dbt debug`.
+
+To create a predictor, create a dbt model with a "predictor" materialization. The name of the model will be the name of the predictor.
+
+#### Parameters:
+- `integration` - name of the integration used to get data from and to save results to. Must be created in MindsDB beforehand using the [`CREATE DATABASE` syntax](https://docs.mindsdb.com/sql/create/databases/).
+- `predict` - the field to predict
+- `predict_alias` [optional] - alias for the predicted field
+- `using` [optional] - options to configure the trained model
+
+```sql
+-- my_first_model.sql
+ {{
+    config(
+        materialized='predictor',
+        integration='photorep',
+        predict='name',
+        predict_alias='name',
+        using={
+            'encoders.location.module': 'CategoricalAutoEncoder',
+            'encoders.rental_price.module': 'NumericEncoder'
+        }
+    )
+ }}
+ select * from stores
+```
+
+To apply the predictor, add a dbt model with the "table" materialization. It creates or replaces a table in the selected integration with the results of the predictor.
+The name of the model is used as the name of the table that stores the prediction results.
+If you need to specify a schema, you can do it with a dot separator: schema_name.table_name.sql
+
+#### Parameters
+- `predictor_name` - name of the predictor. It has to be created in MindsDB.
+- `integration` - name of the integration used to get data from and to save results to. Must be created in MindsDB beforehand using the [`CREATE DATABASE` syntax](https://docs.mindsdb.com/sql/create/databases/).
+
+```
+ {{ config(materialized='table', predictor_name='TEST_PREDICTOR_NAME', integration='photorep') }}
+ select a, bc from ddd where name > latest
+```
diff --git a/website/docs/reference/resource-configs/mssql-configs.md b/website/docs/reference/resource-configs/mssql-configs.md
new file mode 100644
index 00000000000..502dd0d574b
--- /dev/null
+++ b/website/docs/reference/resource-configs/mssql-configs.md
@@ -0,0 +1,150 @@
+---
+title: "Microsoft SQL Server configurations"
+id: "mssql-configs"
+---
+
+## Materializations
+
+Ephemeral materialization is not supported due to T-SQL not supporting nested CTEs. It may work in some cases when you're working with very simple ephemeral models.
+
+### Tables
+
+Tables will, by default, be materialized as columnstore tables.
+This requires SQL Server 2017 or newer for on-premises instances, or service tier S2 or higher for Azure.
+
+This behaviour can be disabled by setting the `as_columnstore` configuration option to `False`.
+
+
+
+
+
+
+
+```sql
+{{
+    config(
+        as_columnstore='False'
+    )
+}}
+
+select *
+from ...
+```
+
+
+
+
+
+
+
+
+
+```yaml
+models:
+  your_project_name:
+    materialized: view
+    staging:
+      materialized: table
+      as_columnstore: False
+```
+
+
+
+
+
+
+
+## Seeds
+
+By default, `dbt-sqlserver` will attempt to insert seed files in batches of 400 rows.
+If this exceeds SQL Server's 2100 parameter limit, the adapter will automatically limit the batch to the highest safe value possible.
+
+To set a different default batch size, you can set the variable `max_batch_size` in your project configuration.
+
+
+
+```yaml
+vars:
+  max_batch_size: 200 # Any integer less than or equal to 2100 will do.
+```
+
+
+
+## Snapshots
+
+Columns in source tables cannot have any constraints.
+If, for example, any column has a `NOT NULL` constraint, an error will be thrown.
+
+## Indices
+
+You can specify indices to be created for your table by specifying post-hooks calling purpose-built macros.
+
+The following macros are available:
+
+* `create_clustered_index(columns, unique=False)`: columns is a list of columns, unique is an optional boolean (defaults to False).
+* `create_nonclustered_index(columns, includes=columns)`: columns is a list of columns, includes is an optional list of columns to include in the index.
+* `drop_all_indexes_on_table()`: drops current indices on a table. Only meaningful if the model is incremental.
+
+Some examples:
+
+
+
+```sql
+{{
+    config({
+        "as_columnstore": false,
+        "materialized": 'table',
+        "post-hook": [
+            "{{ create_clustered_index(columns = ['row_id', 'row_id_complement'], unique=True) }}",
+            "{{ create_nonclustered_index(columns = ['modified_date']) }}",
+            "{{ create_nonclustered_index(columns = ['row_id'], includes = ['modified_date']) }}",
+        ]
+    })
+
+}}
+
+select *
+from ...
+```
+
+
+
+## Grants with auto provisioning
+
+dbt 1.2 introduced the capability to grant/revoke access using the `grants` [configuration option](grants).
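+
+For example, granting `select` on your models to a reporting role via `dbt_project.yml` might look like this minimal sketch (the project and role names are illustrative placeholders):
+
+```yaml
+models:
+  your_project_name:
+    +grants:
+      select: ["reporting_role"]
+```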
+In dbt-sqlserver, you can additionally set `auto_provision_aad_principals` to `true` in your model configuration if you are using Azure Active Directory authentication with an Azure SQL Database or Azure Synapse Dedicated SQL Pool.
+
+This will automatically create the Azure Active Directory principal inside your database if it does not exist yet.
+Note that the principals need to exist in your Azure Active Directory; this setting just makes them available to use in your database.
+
+Principals are not removed again when they are removed from the grants configuration.
+
+
+
+```yaml
+models:
+  your_project_name:
+    auto_provision_aad_principals: true
+```
+
+
+
+## Cross-database macros
+
+The following macros are currently not supported:
+
+* `bool_or`
+* `array_construct`
+* `array_concat`
+* `array_append`
+
+## dbt-utils
+
+Many [`dbt-utils`](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/) macros are supported,
+but require the installation of the [`tsql_utils`](https://hub.getdbt.com/dbt-msft/tsql_utils/latest/) dbt package.
diff --git a/website/docs/reference/resource-configs/no-configs.md b/website/docs/reference/resource-configs/no-configs.md
new file mode 100644
index 00000000000..a9fe4ec8752
--- /dev/null
+++ b/website/docs/reference/resource-configs/no-configs.md
@@ -0,0 +1,11 @@
+---
+title: "No specific configurations for this adapter"
+id: "no-configs"
+---
+
+If you were guided to this page from a data platform setup article, it most likely means:
+
+- Setting up the profile is the only action the end-user needs to take on the data platform, or
+- The subsequent actions the end-user needs to take are not currently documented
+
+If you'd like to contribute to data platform-specific configuration information, refer to [Documenting a new adapter](5-documenting-a-new-adapter)
\ No newline at end of file
diff --git a/website/docs/reference/resource-configs/persist_docs.md b/website/docs/reference/resource-configs/persist_docs.md
index 6514457bce9..9dec5b6cefe 100644
--- a/website/docs/reference/resource-configs/persist_docs.md
+++ b/website/docs/reference/resource-configs/persist_docs.md
@@ -120,11 +120,49 @@ resources as needed.
 
 ## Support
 
-The `persist_docs` config is supported on all core dbt plugins: BigQuery,
-Redshift, Snowflake, and Postgres. Some databases impose limitations on the
-types of descriptions that can be added to database objects. At present, the
-`persist_docs` flag has the following known limitations:
- - Column-level comments are not supported on Snowflake views
+The `persist_docs` config is supported on the most widely used dbt adapters:
+- Postgres
+- Redshift
+- Snowflake
+- BigQuery
+- Apache Spark & Databricks
+
+However, some databases limit where and how descriptions can be added to database objects. Those database adapters might not support `persist_docs`, or might offer only partial support.
+
+Some known issues and limitations:
+
+
    + +- Column-level comments require `file_format: delta` (or another "v2 file format") +- Column-level comments aren't supported for models materialized as views ([issue](https://github.com/dbt-labs/dbt-spark/issues/372)) + +
    + +
    + + + +- No known issues + + + + + +- Column names that must be quoted, such as column names containing special characters, will cause runtime errors if column-level `persist_docs` is enabled. This is fixed in v1.2. + + + + + +- Column-level comments aren't supported for models materialized as views + + + +
    + +
    ## Usage diff --git a/website/docs/reference/resource-configs/plus-prefix.md b/website/docs/reference/resource-configs/plus-prefix.md index 42501f1197d..c054ed5a81b 100644 --- a/website/docs/reference/resource-configs/plus-prefix.md +++ b/website/docs/reference/resource-configs/plus-prefix.md @@ -61,4 +61,10 @@ models:
-Since it doesn't hurt to use the `+` prefix, we recommend you use it whenever adding configs to your `dbt_project.yml` file.
+When adding configs in `dbt_project.yml`, it doesn't hurt to use the `+` prefix, so we recommend always using it.
+
+
+
+**Note:** This use of the `+` prefix, in `dbt_project.yml`, is distinct from the use of `+` to control config merge behavior (clobber vs. add) in other config settings (specific resource `.yml` and `.sql` files). Currently, the only config which supports `+` for controlling config merge behavior is [`grants`](grants#grant-config-inheritance).
+
+
diff --git a/website/docs/reference/resource-configs/postgres-configs.md b/website/docs/reference/resource-configs/postgres-configs.md
index abc43992c65..d28c073b065 100644
--- a/website/docs/reference/resource-configs/postgres-configs.md
+++ b/website/docs/reference/resource-configs/postgres-configs.md
@@ -3,6 +3,7 @@ title: "Postgres configurations"
 id: "postgres-configs"
 ---
 
+
 ## Performance Optimizations
 
 ### Unlogged
@@ -36,6 +37,8 @@ models:
 
 ### Indexes
 
+While Postgres works reasonably well for datasets smaller than about 10 million rows, database tuning is sometimes required. It's important to create indexes for columns that are commonly used in joins or `where` clauses.
+
 
 - **v0.20.0:** Introduced native support for `indexes` config
 
diff --git a/website/docs/reference/resource-configs/pre-hook-post-hook.md b/website/docs/reference/resource-configs/pre-hook-post-hook.md
index 09c5aa27716..d3fb6c9c3e7 100644
--- a/website/docs/reference/resource-configs/pre-hook-post-hook.md
+++ b/website/docs/reference/resource-configs/pre-hook-post-hook.md
@@ -100,9 +100,13 @@ select ...
 
 ## Definition
 
-A SQL statement (or list of SQL statements) to be run before or after a model, seed or snapshot is built.
+A SQL statement (or list of SQL statements) to be run before or after a model, seed, or snapshot is built.
 
-Pre- and post-hooks can also call macros that return SQL statements.
+Pre- and post-hooks can also call macros that return SQL statements. If your macro depends on values available only at execution time, such as using model configurations or `ref()` calls to other resources as inputs, you will need to [wrap your macro call in an extra set of curly braces](dont-nest-your-curlies#an-exception).
+
+### Why would I use hooks?
+
+dbt aims to provide all the boilerplate SQL you need (DDL, DML, and DCL) via out-of-the-box functionality, which you can configure quickly and concisely. In some cases, there may be SQL that you want or need to run, specific to functionality in your data platform, which dbt does not (yet) offer as a built-in feature. In those cases, you can write the exact SQL you need, using dbt's compilation context, and pass it into a `pre-` or `post-` hook to run before or after your model, seed, or snapshot.
 
@@ -112,6 +116,52 @@ Pre- and post-hooks can also call macros that return SQL statements.
 
 ## Examples
 
+
+
+
+
+### [Redshift] Unload one model to S3
+
+
+
+```sql
+{{ config(
+    post_hook = "unload ('select * from {{ this }}') to 's3://bucket_name/{{ this }}'"
+) }}
+
+select ...
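+
+-- Note (illustrative): a real UNLOAD also requires an authorization clause,
+-- such as iam_role '<your-iam-role-arn>', which this sketch omits.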
+```
+
+
+
+See: [Redshift docs on `UNLOAD`](https://docs.aws.amazon.com/redshift/latest/dg/r_UNLOAD.html)
+
+### [Apache Spark] Analyze tables after creation
+
+
+
+```yml
+
+models:
+  jaffle_shop: # this is the project name
+    marts:
+      finance:
+        +post-hook:
+          # this can be a list
+          - "analyze table {{ this }} compute statistics for all columns"
+          # or call a macro instead
+          - "{{ analyze_table() }}"
+```
+
+See: [Apache Spark docs on `ANALYZE TABLE`](https://spark.apache.org/docs/latest/sql-ref-syntax-aux-analyze-table.html)
+
+
+
+
+
+
+
 ### Grant privileges on a model
@@ -171,6 +221,8 @@ model:
 
+
+
 ### Additional examples
 
 We've compiled some more in-depth examples [here](hooks-operations#additional-examples).
@@ -192,7 +244,7 @@ There may be occasions where you need to run these hooks _outside_ of a transaction:
 
 * You want to run a `VACUUM` in a `post-hook`, however this cannot be executed within a transaction ([Redshift docs](https://docs.aws.amazon.com/redshift/latest/dg/r_VACUUM_command.html#r_VACUUM_usage_notes))
 * You want to insert a record into an audit table at the start of a run, and do not want that statement rolled back if the model creation fails.
 
-To achieve this, you can use one of the following syntaxes:
+To achieve this, you can use one of the following syntaxes. (Note: You should NOT use this syntax if using a database where dbt does not use transactions by default, including Snowflake, BigQuery, and Spark/Databricks.)
 
 #### Config block: use the `before_begin` and `after_commit` helper macros
diff --git a/website/docs/reference/resource-configs/quote_columns.md b/website/docs/reference/resource-configs/quote_columns.md
index a9c5fc24843..5701fe0f11f 100644
--- a/website/docs/reference/resource-configs/quote_columns.md
+++ b/website/docs/reference/resource-configs/quote_columns.md
@@ -46,9 +46,10 @@ seeds:
+Or (as of v0.21):
+
 
-Or (as of v0.21):
 
 ```yml
 version: 2
diff --git a/website/docs/reference/resource-configs/schema.md b/website/docs/reference/resource-configs/schema.md
index ce055c9a038..3edb96f2ea6 100644
--- a/website/docs/reference/resource-configs/schema.md
+++ b/website/docs/reference/resource-configs/schema.md
@@ -9,7 +9,7 @@ This is a work in progress document. While this configuration applies to multipl
 :::
 
 ## Definition
-Optionally specify a custom schema for a [model](docs/building-a-dbt-project/building-models) or [seed](docs/building-a-dbt-project/seeds). (To specify a schema for a [snapshot](snapshots), use the [`target_schema` config](target_schema)).
+Optionally specify a custom schema for a [model](/docs/build/sql-models) or [seed](/docs/build/seeds). (To specify a schema for a [snapshot](snapshots), use the [`target_schema` config](target_schema)).
 
 When dbt creates a relation (table/view) in a database, it creates it as: `{{ database }}.{{ schema }}.{{ identifier }}`, e.g. `analytics.finance.payments`
 
 The standard behavior of dbt is:
 * If a custom schema is _not_ specified, the schema of the relation is the target schema (`{{ target.schema }}`).
 * If a custom schema is specified, by default, the schema of the relation is `{{ target.schema }}_{{ schema }}`.
 
-To learn more about changing the way that dbt generates a relation's `schema`, read [Using Custom Schemas](docs/building-a-dbt-project/building-models/using-custom-schemas)
+To learn more about changing the way that dbt generates a relation's `schema`, read [Using Custom Schemas](/docs/build/custom-schemas)
 
 ## Usage
diff --git a/website/docs/reference/resource-configs/severity.md b/website/docs/reference/resource-configs/severity.md
index 78525bf3df4..436bfda9460 100644
--- a/website/docs/reference/resource-configs/severity.md
+++ b/website/docs/reference/resource-configs/severity.md
@@ -1,4 +1,7 @@
 ---
+title: "Configuring test `severity`"
+id: "severity"
+description: "You can use error thresholds to configure the severity of test results and set when to produce an error or warning based on the number of failed tests."
 resource_types: [tests]
 datatype: string
 ---
@@ -108,7 +111,7 @@ Set the default for all tests in a package or project:
 
 ```yaml
 tests:
   +severity: warn # all tests
-
+  <package_name>:
     +warn_if: >10 # tests in <package_name>
 ```
diff --git a/website/docs/reference/resource-configs/singlestore-configs.md b/website/docs/reference/resource-configs/singlestore-configs.md
new file mode 100644
index 00000000000..f503779f0fc
--- /dev/null
+++ b/website/docs/reference/resource-configs/singlestore-configs.md
@@ -0,0 +1,111 @@
+---
+title: "SingleStore configurations"
+id: "singlestore-configs"
+---
+
+
+
+
+ - **v1.1.2:** Added support for `storage_type`, `indexes`, `primary_key`, `sort_key`, `shard_key`, `unique_table_key`, `charset`, `collation` options for creating SingleStore tables.
+
+
+
+## Performance Optimizations
+[SingleStore Physical Database Schema Design documentation](https://docs.singlestore.com/managed-service/en/create-a-database/physical-database-schema-design/concepts-of-physical-database-schema-design.html) is helpful if you want to use specific options (that are described below) in your dbt project.
+
+
+### Storage type
+SingleStore supports two storage types: **In-Memory Rowstore** and **Disk-based Columnstore** (the latter is the default).
See [the docs](https://docs.singlestore.com/managed-service/en/create-a-database/physical-database-schema-design/concepts-of-physical-database-schema-design/choosing-a-table-storage-type.html) for details. The dbt-singlestore adapter allows you to specify which storage type your table materialization relies on, using the `storage_type` config parameter.
+
+
+
+```sql
+{{ config(materialized='table', storage_type='rowstore') }}
+
+select ...
+```
+
+
+
+### Keys
+
+SingleStore tables are [sharded](https://docs.singlestore.com/managed-service/en/getting-started-with-managed-service/about-managed-service/sharding.html) and can be created with various column definitions. The following options are supported by the dbt-singlestore adapter; each of them accepts `column_list` (a list of column names) as an option value. Please refer to [Creating a Columnstore Table](https://docs.singlestore.com/managed-service/en/create-a-database/physical-database-schema-design/procedures-for-physical-database-schema-design/creating-a-columnstore-table.html) for more information on various key types in SingleStore.
+- `primary_key` (translated to `PRIMARY KEY (column_list)`)
+- `sort_key` (translated to `KEY (column_list) USING CLUSTERED COLUMNSTORE`)
+- `shard_key` (translated to `SHARD KEY (column_list)`)
+- `unique_table_key` (translated to `UNIQUE KEY (column_list)`)
+
+
+
+```sql
+{{
+    config(
+        primary_key=['id', 'user_id'],
+        shard_key=['id']
+    )
+}}
+
+select ...
+```
+
+
+
+
+
+```sql
+{{
+    config(
+        materialized='table',
+        unique_table_key=['id'],
+        sort_key=['status'],
+    )
+}}
+
+select ...
+```
+
+
+
+### Indexes
+Similar to the Postgres adapter, table models, incremental models, seeds, and snapshots may have a list of `indexes` defined. Each index can have the following components:
+- `columns` (list, required): one or more columns on which the index is defined
+- `unique` (boolean, optional): whether the index should be declared unique
+- `type` (string, optional): a supported [index type](https://docs.singlestore.com/managed-service/en/reference/sql-reference/data-definition-language-ddl/create-index.html), `hash` or `btree`
+
+As SingleStore tables are sharded, there are certain limitations on index creation; see the [docs](https://docs.singlestore.com/managed-service/en/create-a-database/physical-database-schema-design/concepts-of-physical-database-schema-design/understanding-keys-and-indexes-in-singlestore.html) for more details.
+
+
+
+```sql
+{{
+    config(
+        materialized='table',
+        shard_key=['id'],
+        indexes=[{'columns': ['order_date', 'id']}, {'columns': ['status'], 'type': 'hash'}]
+    )
+}}
+
+select ...
+```
+
+
+
+### Other options
+
+You can specify the character set and collation for the table using the `charset` and/or `collation` options. Supported values for `charset` are `binary`, `utf8`, and `utf8mb4`. Supported values for `collation` can be viewed as the output of the `SHOW COLLATION` SQL query. Default collations for the corresponding character sets are `binary`, `utf8_general_ci`, and `utf8mb4_general_ci`.
+
+
+
+```sql
+{{
+    config(
+        charset='utf8mb4',
+        collation='utf8mb4_general_ci'
+    )
+}}
+
+select ...
+```
+
+
\ No newline at end of file
diff --git a/website/docs/reference/resource-configs/snowflake-configs.md b/website/docs/reference/resource-configs/snowflake-configs.md
index d013932cfb0..140a716a53f 100644
--- a/website/docs/reference/resource-configs/snowflake-configs.md
+++ b/website/docs/reference/resource-configs/snowflake-configs.md
@@ -10,7 +10,7 @@ To-do:
 
 ## Transient tables
 
-Snowflake supports the creation of [transient tables](https://docs.snowflake.net/manuals/user-guide/tables-temp-transient.html). Snowflake does not preserve a history for these tables, which can result in a measurable reduction of your Snowflake storage costs. Note however that transient tables do not participate in Time Travel. Weigh these tradeoffs when deciding whether or not to configure your dbt models as `transient`. **By default, all Snowflake tables created by dbt are `transient`.**
+Snowflake supports the creation of [transient tables](https://docs.snowflake.net/manuals/user-guide/tables-temp-transient.html). Snowflake does not preserve a history for these tables, which can result in a measurable reduction of your Snowflake storage costs. Transient tables participate in time travel to a limited degree, with a retention period of 1 day by default and no fail-safe period. Weigh these tradeoffs when deciding whether or not to configure your dbt models as `transient`. **By default, all Snowflake tables created by dbt are `transient`.**
 
 ### Configuring transient tables in dbt_project.yml
 
@@ -51,7 +51,7 @@ select * from ...
 parameter that can be quite useful later on when searching in the [QUERY_HISTORY view](https://docs.snowflake.com/en/sql-reference/account-usage/query_history.html).
 
 dbt supports setting a default query tag for the duration of its Snowflake connections in
-[your profile](snowflake-profile). You can set more precise values (and override the default) for subsets of models by setting
+[your profile](/reference/warehouse-setups/snowflake-setup). You can set more precise values (and override the default) for subsets of models by setting
 a `query_tag` model config or by overriding the default `set_query_tag` macro:
 
@@ -93,18 +93,13 @@ In this example, you can set up a query tag to be applied to every query with th
 ```
 
-**Note:** query tags are set at the _session_ level. At the start of each model
-, if the model has a custom `query_tag`
-configured, dbt will run `alter session set query_tag` to set the new value.
-At the end of the materialization, dbt will run another `alter` statement to reset
-the tag to its default value. As such, build failures midway through a materialization may result in subsequent
-queries running with an incorrect tag.
+**Note:** query tags are set at the _session_ level. At the start of each model materialization, if the model has a custom `query_tag` configured, dbt will run `alter session set query_tag` to set the new value. At the end of the materialization, dbt will run another `alter` statement to reset the tag to its default value. As such, build failures midway through a materialization may result in subsequent queries running with an incorrect tag.
 
 ## Merge behavior (incremental models)
 
-The [`incremental_strategy` config](configuring-incremental-models#about-incremental_strategy) controls how dbt builds incremental models. By default, dbt will use a [merge statement](https://docs.snowflake.net/manuals/sql-reference/sql/merge.html) on Snowflake to refresh incremental tables.
+The [`incremental_strategy` config](/docs/build/incremental-models#about-incremental_strategy) controls how dbt builds incremental models. By default, dbt will use a [merge statement](https://docs.snowflake.net/manuals/sql-reference/sql/merge.html) on Snowflake to refresh incremental tables. Snowflake's `merge` statement fails with a "nondeterministic merge" error if the `unique_key` specified in your model config is not actually unique. If you encounter this error, you can instruct dbt to use a two-step incremental approach by setting the `incremental_strategy` config for your model to `delete+insert`. diff --git a/website/docs/reference/resource-configs/spark-configs.md b/website/docs/reference/resource-configs/spark-configs.md index 6e6d7ec0e2d..ffb065df607 100644 --- a/website/docs/reference/resource-configs/spark-configs.md +++ b/website/docs/reference/resource-configs/spark-configs.md @@ -12,13 +12,13 @@ To-do: When materializing a model as `table`, you may include several optional configs that are specific to the dbt-spark plugin, in addition to the standard [model configs](model-configs). -| Option | Description | Required? | Example | -|---------|----------------------------------------------------|-------------------------|--------------------------| -| file_format | The file format to use when creating tables (`parquet`, `delta`, `csv`, `json`, `text`, `jdbc`, `orc`, `hive` or `libsvm`). | Optional | `parquet`| -| location_root | The created table uses the specified directory to store its data. The table alias is appended to it. | Optional | `/mnt/root` | -| partition_by | Partition the created table by the specified columns. A directory is created for each partition. | Optional | `date_day` | -| clustered_by | Each partition in the created table will be split into a fixed number of buckets by the specified columns. | Optional | `country_code` | -| buckets | The number of buckets to create while clustering | Required if `clustered_by` is specified | `8` | +| Option | Description | Required? | Example | +|---------|------------------------------------------------------------------------------------------------------------------------------------|-------------------------|--------------------------| +| file_format | The file format to use when creating tables (`parquet`, `delta`, `hudi`, `csv`, `json`, `text`, `jdbc`, `orc`, `hive` or `libsvm`). | Optional | `parquet`| +| location_root | The created table uses the specified directory to store its data. The table alias is appended to it. | Optional | `/mnt/root` | +| partition_by | Partition the created table by the specified columns. A directory is created for each partition. | Optional | `date_day` | +| clustered_by | Each partition in the created table will be split into a fixed number of buckets by the specified columns. | Optional | `country_code` | +| buckets | The number of buckets to create while clustering | Required if `clustered_by` is specified | `8` | ## Incremental models @@ -30,10 +30,12 @@ When materializing a model as `table`, you may include several optional configs dbt seeks to offer useful, intuitive modeling abstractions by means of its built-in configurations and materializations. Because there is so much variance between Apache Spark clusters out in the world—not to mention the powerful features offered to Databricks users by the Delta file format and custom runtime—making sense of all the available options is an undertaking in its own right. 
-For that reason, the dbt-spark plugin leans heavily on the [`incremental_strategy` config](configuring-incremental-models#about-incremental_strategy). This config tells the incremental materialization how to build models in runs beyond their first. It can be set to one of three values: +Alternatively, you can use Apache Hudi file format with Apache Spark runtime for building incremental models. + +For that reason, the dbt-spark plugin leans heavily on the [`incremental_strategy` config](/docs/build/incremental-models#about-incremental_strategy). This config tells the incremental materialization how to build models in runs beyond their first. It can be set to one of three values: - **`append`** (default): Insert new records without updating or overwriting any existing data. - **`insert_overwrite`**: If `partition_by` is specified, overwrite partitions in the with new data. If no `partition_by` is specified, overwrite the entire table with new data. - - **`merge`** (Delta Lake only): Match records based on a `unique_key`; update old records, insert new ones. (If no `unique_key` is specified, all new data is inserted, similar to `append`.) + - **`merge`** (Delta and Hudi file format only): Match records based on a `unique_key`; update old records, insert new ones. (If no `unique_key` is specified, all new data is inserted, similar to `append`.) Each of these strategies has its pros and cons, which we'll discuss below. As with any model config, `incremental_strategy` may be specified in `dbt_project.yml` or within a model file's `config()` block. @@ -191,8 +193,9 @@ insert overwrite table analytics.spark_incremental **Usage notes:** The `merge` incremental strategy requires: -- `file_format: delta` -- Databricks Runtime 5.1 and above +- `file_format: delta or hudi` +- Databricks Runtime 5.1 and above for delta file format +- Apache Spark for hudi file format dbt will run an [atomic `merge` statement](https://docs.databricks.com/spark/latest/spark-sql/language-manual/merge-into.html) which looks nearly identical to the default merge behavior on Snowflake and BigQuery. If a `unique_key` is specified (recommended), dbt will update old records with values from new records that match on the key column. If a `unique_key` is not specified, dbt will forgo match criteria and simply insert all new records (similar to `append` strategy). 
@@ -201,16 +204,16 @@ dbt will run an [atomic `merge` statement](https://docs.databricks.com/spark/lat values={[ { label: 'Source code', value: 'source', }, { label: 'Run code', value: 'run', }, - ] +] }> - + ```sql {{ config( materialized='incremental', - file_format='delta', + file_format='delta', # or 'hudi' unique_key='user_id', incremental_strategy='merge' ) }} @@ -237,10 +240,10 @@ group by 1 - + ```sql -create temporary view delta_incremental__dbt_tmp as +create temporary view merge_incremental__dbt_tmp as with new_events as ( @@ -261,8 +264,8 @@ create temporary view delta_incremental__dbt_tmp as ; -merge into analytics.delta_incremental as DBT_INTERNAL_DEST - using delta_incremental__dbt_tmp as DBT_INTERNAL_SOURCE +merge into analytics.merge_incremental as DBT_INTERNAL_DEST + using merge_incremental__dbt_tmp as DBT_INTERNAL_SOURCE on DBT_INTERNAL_SOURCE.user_id = DBT_INTERNAL_DEST.user_id when matched then update set * when not matched then insert * @@ -297,11 +300,11 @@ use or set `database` as a node config or in the target profile when running dbt If you want to control the schema/database in which dbt will materialize models, use the `schema` config and `generate_schema_name` macro _only_. -## Databricks configurations +## Default file format configurations -To access features exclusive to Databricks runtimes, such as +To access advanced incremental strategies features, such as [snapshots](snapshots) and the `merge` incremental strategy, you will want to -use the Delta file format when materializing models as tables. +use the Delta or Hudi file format as the default file format when materializing models as tables. It's quite convenient to do this by setting a top-level configuration in your project file: @@ -310,13 +313,13 @@ project file: ```yml models: - +file_format: delta + +file_format: delta # or hudi seeds: - +file_format: delta + +file_format: delta # or hudi snapshots: - +file_format: delta + +file_format: delta # or hudi ``` diff --git a/website/docs/reference/resource-configs/sql_header.md b/website/docs/reference/resource-configs/sql_header.md index 90c14d318fa..e56fd4c0f96 100644 --- a/website/docs/reference/resource-configs/sql_header.md +++ b/website/docs/reference/resource-configs/sql_header.md @@ -102,7 +102,7 @@ This uses the config block syntax: ```sql {{ config( - sql_header="alter session set timezone = 'Australia/Sydney'" + sql_header="alter session set timezone = 'Australia/Sydney';" ) }} select * from {{ ref('other_model') }} diff --git a/website/docs/reference/resource-configs/store_failures.md b/website/docs/reference/resource-configs/store_failures.md index 9284547990d..9207e333546 100644 --- a/website/docs/reference/resource-configs/store_failures.md +++ b/website/docs/reference/resource-configs/store_failures.md @@ -10,7 +10,7 @@ datatype: boolean
    -The configured test(s) will never store their failures when `dbt test --store-failures` is invoked. +The configured test(s) will store their failures when `dbt test --store-failures` is invoked. ## Description Optionally set a test to always or never store its failures in the database. @@ -18,7 +18,7 @@ Optionally set a test to always or never store its failures in the database. `store_failures` config will take precedence over the presence or absence of the `--store-failures` flag. - If the `store_failures` config is `none` or omitted, the resource will use the value of the `--store-failures` flag. -This logic is encoded in the [`should_store_failures()`](https://github.com/fishtown-analytics/dbt/blob/98c015b7754779793e44e056905614296c6e4527/core/dbt/include/global_project/macros/materializations/helpers.sql#L77) macro. +This logic is encoded in the [`should_store_failures()`](https://github.com/dbt-labs/dbt-core/blob/98c015b7754779793e44e056905614296c6e4527/core/dbt/include/global_project/macros/materializations/helpers.sql#L77) macro. + ## Examples ### Build all snapshots in a schema named `snapshots` @@ -66,7 +66,7 @@ snapshots: ### Use the same schema-naming behavior as models -Leverage the [`generate_schema_name` macro](using-custom-schemas) to build snapshots in schemas that follow the same naming behavior as your models. +Leverage the [`generate_schema_name` macro](/docs/build/custom-schemas) to build snapshots in schemas that follow the same naming behavior as your models. Notes: * This macro is not available when configuring from the `dbt_project.yml` file, so must be configured in a snapshot config block. diff --git a/website/docs/reference/resource-configs/teradata-configs.md b/website/docs/reference/resource-configs/teradata-configs.md index d1a65928a95..033dec1d94a 100644 --- a/website/docs/reference/resource-configs/teradata-configs.md +++ b/website/docs/reference/resource-configs/teradata-configs.md @@ -208,7 +208,7 @@ As explained in [dbt seeds documentation](https://docs.getdbt.com/docs/building- Since seeds are version controlled, they are best suited to files that contain business-specific logic, for example a list of country codes or user IDs of employees. -Loading CSVs using dbt's seed functionality is not performant for large files. Consider using a different tool to load these CSVs into your data warehouse. +Loading CSVs using dbt's seed functionality is not performant for large files. Consider using a different tool to load these CSVs into your . 
::: diff --git a/website/docs/reference/resource-properties/config.md b/website/docs/reference/resource-properties/config.md index 7d251329a0c..636651c45b4 100644 --- a/website/docs/reference/resource-properties/config.md +++ b/website/docs/reference/resource-properties/config.md @@ -1,5 +1,5 @@ --- -resource_types: [models, seeds, snapshots, tests] +resource_types: [models, seeds, snapshots, tests, sources, metrics, exposures] datatype: "{dictionary}" --- @@ -14,6 +14,9 @@ datatype: "{dictionary}" { label: 'Seeds', value: 'seeds', }, { label: 'Snapshots', value: 'snapshots', }, { label: 'Tests', value: 'tests', }, + { label: 'Sources', value: 'sources', }, + { label: 'Metrics', value: 'metrics', }, + { label: 'Exposures', value: 'exposures', }, ] }> @@ -104,6 +107,91 @@ version: 2 + + + + +We have added support for the `config` property on sources in dbt Core v1.1 + + + + + + + +```yml +version: 2 + +sources: + - name: + config: + [](source-configs): + tables: + - name: + config: + [](source-configs): +``` + + + + + + + + + + + +We have added support for the `config` property on sources in dbt Core v1.3 + + + + + + + +```yml +version: 2 + +metrics: + - name: + config: + enabled: true | false +``` + + + + + + + + + + + +Support for the `config` property on `metrics` was added in dbt Core v1.3 + + + + + + + +```yml +version: 2 + +exposures: + - name: + config: + enabled: true | false +``` + + + + + + + The `config` property allows you to configure resources at the same time you're defining properties in yaml files. diff --git a/website/docs/reference/resource-properties/description.md b/website/docs/reference/resource-properties/description.md index 74d165cac3f..9a5342473d7 100644 --- a/website/docs/reference/resource-properties/description.md +++ b/website/docs/reference/resource-properties/description.md @@ -357,7 +357,7 @@ version: 2 models: - name: customers - description: "!\[dbt Logo](https://raw.githubusercontent.com/dbt-labs/dbt/develop/etc/dbt-logo.svg)" + description: "!\[dbt Logo](https://github.com/dbt-labs/dbt-core/blob/main/etc/dbt-core.svg)" columns: - name: customer_id diff --git a/website/docs/reference/resource-properties/docs.md b/website/docs/reference/resource-properties/docs.md deleted file mode 100644 index feee1081324..00000000000 --- a/website/docs/reference/resource-properties/docs.md +++ /dev/null @@ -1,141 +0,0 @@ ---- -resource_types: models -datatype: "{dictionary}" -default_value: {show: true} ---- - - - - - - -```yml -version: 2 - -models: - - name: model_name - docs: - show: true | false - -``` - - - - - - - -This property is not implemented for sources. - - - - - - - -```yml -version: 2 - -seeds: - - name: seed_name - docs: - show: true | false - -``` - - - - - - - - - -```yml -version: 2 - -snapshots: - - name: snapshot_name - docs: - show: true | false - -``` - - - - - - - - - -```yml -version: 2 - -analyses: - - name: analysis_name - docs: - show: true | false -``` - - - - - - - - - - - -```yml -version: 2 - -macros: - - name: macro_name - docs: - show: true | false - -``` - - - - - - - -## Definition -The `docs` field can be used to provide documentation-specific configuration to models. The only currently supported `docs` attribute is `show`, which controls whether or not models are shown in the auto-generated documentation website. - -**Note:** hidden models will still appear in the dbt DAG visualization, but will be identified as "hidden". - - - -* `v0.16.0`: This property was added - - - -## Default -The default value for `show` is `true`. 
- -## Examples -### Mark a model as hidden - -```yml -models: - - name: sessions__tmp - docs: - show: false -``` diff --git a/website/docs/reference/resource-properties/external.md b/website/docs/reference/resource-properties/external.md index 3943a877e55..a058d4dc685 100644 --- a/website/docs/reference/resource-properties/external.md +++ b/website/docs/reference/resource-properties/external.md @@ -31,11 +31,11 @@ sources: ## Definition An extensible dictionary of metadata properties specific to sources that point to external tables. -There are optional built-in properties, with simple type validation, that roughy correspond to +There are optional built-in properties, with simple type validation, that roughly correspond to the Hive external spec. You may define and use as many additional properties as you'd like. You may wish to define the `external` property in order to: -- Power macros that introspect [`graph.sources`](dbt-jinja-functions/graph) +- Power macros that introspect [`graph.sources`](/reference/dbt-jinja-functions/graph) - Define metadata that you can later extract from the [manifest](manifest-json) For an example of how this property can be used to power custom workflows, see the [`dbt-external-tables`](https://github.com/dbt-labs/dbt-external-tables) package. diff --git a/website/docs/reference/resource-properties/quoting.md b/website/docs/reference/resource-properties/quoting.md index 455e7857018..5cb84f53789 100644 --- a/website/docs/reference/resource-properties/quoting.md +++ b/website/docs/reference/resource-properties/quoting.md @@ -37,8 +37,15 @@ Note that for BigQuery quoting configuration, `database` and `schema` should be ## Default +The default values vary by database. -By default, dbt will _not_ quote the database, schema, or identifier. +For most adapters, quoting is set to _true_ by default. + +Why? It's equally easy to select from relations with quoted or unquoted identifiers. Quoting allows you to use reserved words and special characters in those identifiers, though we recommend avoiding this whenever possible. + +On Snowflake, quoting is set to _false_ by default. + +Creating relations with quoted identifiers also makes those identifiers case sensitive. It's much more difficult to select from them. You can re-enable quoting for relations identifiers that are case sensitive, reserved words, or contain special characters, but we recommend you avoid this as much as possible. ## Example diff --git a/website/docs/reference/resource-properties/tests.md b/website/docs/reference/resource-properties/tests.md index 713db0c56cc..da78376f57b 100644 --- a/website/docs/reference/resource-properties/tests.md +++ b/website/docs/reference/resource-properties/tests.md @@ -148,11 +148,12 @@ This feature is not implemented for analyses. ## Related documentation -* [Testing guide](building-a-dbt-project/tests) + +* [Testing guide](/docs/build/tests) ## Description -The `tests` property defines assertions about a column, , or . The property contains a list of [generic tests](building-a-dbt-project/tests#generic-tests), referenced by name, which can include the four built-in generic tests available in dbt. For example, you can add tests that ensure a column contains no duplicates and zero null values. Any arguments or [configurations](test-configs) passed to those tests should be nested below the test name. +The `tests` property defines assertions about a column, , or . 
The property contains a list of [generic tests](/docs/build/tests#generic-tests), referenced by name, which can include the four built-in generic tests available in dbt. For example, you can add tests that ensure a column contains no duplicates and zero null values. Any arguments or [configurations](test-configs) passed to those tests should be nested below the test name. Once these tests are defined, you can validate their correctness by running `dbt test`. @@ -182,6 +183,8 @@ models: ### `unique` This test validates that there are no duplicate values present in a field. + +The config and where clause are optional. @@ -193,7 +196,9 @@ models: columns: - name: order_id tests: - - unique + - unique: + config: + where: "order_id > 21" ``` @@ -436,7 +441,7 @@ $ dbt test ### Alternative format for defining tests -When defining a generic test with a number of arguments and configurations, the YAML can look and feel unwieldy. If you find it easier easier, you can define the same test properties as top-level keys of a single dictionary, by providing the test name as `test_name` instead. It's totally up to you. +When defining a generic test with a number of arguments and configurations, the YAML can look and feel unwieldy. If you find it easier, you can define the same test properties as top-level keys of a single dictionary, by providing the test name as `test_name` instead. It's totally up to you. This example is identical to the one above: diff --git a/website/docs/reference/seed-configs.md b/website/docs/reference/seed-configs.md index be9b3bcaec9..b7da013a133 100644 --- a/website/docs/reference/seed-configs.md +++ b/website/docs/reference/seed-configs.md @@ -84,6 +84,7 @@ seeds: [+](plus-prefix)[persist_docs](persist_docs): [+](plus-prefix)[full_refresh](full_refresh): [+](plus-prefix)[meta](meta): {} + [+](plus-prefix)[grants](grants): {} ``` @@ -112,6 +113,7 @@ seeds: [persist_docs](persist_docs): [full_refresh](full_refresh): [meta](meta): {} + [grants](grants): {} ``` @@ -128,7 +130,7 @@ Seed configurations, like model configurations, are applied hierarchically — c ### Examples #### Apply the `schema` configuration to all seeds -To apply a configuration to all seeds, including those in any installed [packages](package-management), nest the configuration directly under the `seeds` key: +To apply a configuration to all seeds, including those in any installed [packages](/docs/build/packages), nest the configuration directly under the `seeds` key: diff --git a/website/docs/reference/seed-properties.md b/website/docs/reference/seed-properties.md index a8f9ad02585..7fbed82dc27 100644 --- a/website/docs/reference/seed-properties.md +++ b/website/docs/reference/seed-properties.md @@ -19,7 +19,7 @@ version: 2 seeds: - name: [description](description): - [docs](resource-properties/docs): + [docs](/reference/resource-configs/docs): show: true | false [config](resource-properties/config): [](seed-configs): diff --git a/website/docs/reference/snapshot-configs.md b/website/docs/reference/snapshot-configs.md index 12eb9cbf2c2..0f0c629658f 100644 --- a/website/docs/reference/snapshot-configs.md +++ b/website/docs/reference/snapshot-configs.md @@ -113,7 +113,7 @@ snapshots: [+](plus-prefix)[pre-hook](pre-hook-post-hook): | [] [+](plus-prefix)[post-hook](pre-hook-post-hook): | [] [+](plus-prefix)[persist_docs](persist_docs): {} - + [+](plus-prefix)[grants](grants): {} ``` @@ -134,6 +134,7 @@ snapshots: [pre-hook](pre-hook-post-hook): | [] [post-hook](pre-hook-post-hook): | [] [persist_docs](persist_docs): 
{} + [grants](grants): {} ```
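+
+For example, a snapshot block combining the new `grants` config with common snapshot configs might look like this sketch (the snapshot name, schema, keys, source, and role are illustrative placeholders):
+
+```sql
+{% snapshot orders_snapshot %}
+
+    {{
+        config(
+            target_schema='snapshots',
+            unique_key='id',
+            strategy='timestamp',
+            updated_at='updated_at',
+            grants={'select': ['reporting_role']}
+        )
+    }}
+
+    select * from {{ source('jaffle_shop', 'orders') }}
+
+{% endsnapshot %}
+```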
    @@ -151,6 +152,7 @@ snapshots: [pre_hook](pre-hook-post-hook)="" | [""], [post_hook](pre-hook-post-hook)="" | [""] [persist_docs](persist_docs)={} + [grants](grants)={} ) }} ``` @@ -171,7 +173,7 @@ Snapshot configurations are applied hierarchically in the order above. ### Examples #### Apply the `target_schema` configuration to all snapshots -To apply a configuration to all snapshots, including those in any installed [packages](package-management), nest the configuration directly under the `snapshots` key: +To apply a configuration to all snapshots, including those in any installed [packages](/docs/build/packages), nest the configuration directly under the `snapshots` key: @@ -250,7 +252,7 @@ You can also define some common configs in a snapshot's `config` block. We don't version: 2 snapshots: - - name: orders_snapshot: + - name: orders_snapshot config: persist_docs: relation: true diff --git a/website/docs/reference/snapshot-properties.md b/website/docs/reference/snapshot-properties.md index f35e08277b1..c4fa4cac218 100644 --- a/website/docs/reference/snapshot-properties.md +++ b/website/docs/reference/snapshot-properties.md @@ -21,7 +21,7 @@ snapshots: - name: [description](description): [meta](meta): {} - [docs](resource-properties/docs): + [docs](/reference/resource-configs/docs): show: true | false [config](resource-properties/config): [](snapshot-configs): diff --git a/website/docs/reference/snowflake-permissions.md b/website/docs/reference/snowflake-permissions.md new file mode 100644 index 00000000000..80dbec25cc8 --- /dev/null +++ b/website/docs/reference/snowflake-permissions.md @@ -0,0 +1,23 @@ +--- +title: "Snowflake Permissions" +--- + +## Example Snowflake permissions + +``` +-- NOTE: warehouse_name, database_name, and role_name are placeholders! +-- Replace as-needed for your organization's naming convention! + +grant all on warehouse warehouse_name to role role_name; +grant usage on database database_name to role role_name; +grant create schema on database database_name to role role_name; +grant usage on schema database.an_existing_schema to role role_name; +grant create table on schema database.an_existing_schema to role role_name; +grant create view on schema database.an_existing_schema to role role_name; +grant usage on future schemas in database database_name to role role_name; +grant select on future tables in database database_name to role role_name; +grant select on future views in database database_name to role role_name; +grant usage on all schemas in database database_name to role role_name; +grant select on all tables in database database_name to role role_name; +grant select on all views in database database_name to role role_name; +``` diff --git a/website/docs/reference/source-configs.md b/website/docs/reference/source-configs.md index e18bb8a23e7..4968593c0f2 100644 --- a/website/docs/reference/source-configs.md +++ b/website/docs/reference/source-configs.md @@ -66,13 +66,13 @@ sources: -Sources can be configured via a `config:` block within their `.yml` definitions, or from the `dbt_project.yml` file under the `sources:` key. This configuration is most useful for configuring sources imported from [a package](package-management). You can disable sources imported from a package to prevent them from rendering in the documentation, or to prevent [source freshness checks](using-sources#snapshotting-source-data-freshness) from running on source tables imported from packages. 
+Sources can be configured via a `config:` block within their `.yml` definitions, or from the `dbt_project.yml` file under the `sources:` key. This configuration is most useful for configuring sources imported from [a package](/docs/build/packages). You can disable sources imported from a package to prevent them from rendering in the documentation, or to prevent [source freshness checks](/docs/build/sources#snapshotting-source-data-freshness) from running on source tables imported from packages. -Sources can be configured from the `dbt_project.yml` file under the `sources:` key. This configuration is most useful for configuring sources imported from [a package](package-management). You can disable sources imported from a package to prevent them from rendering in the documentation, or to prevent [source freshness checks](using-sources#snapshotting-source-data-freshness) from running on source tables imported from packages. +Sources can be configured from the `dbt_project.yml` file under the `sources:` key. This configuration is most useful for configuring sources imported from [a package](package-management). You can disable sources imported from a package to prevent them from rendering in the documentation, or to prevent [source freshness checks](/docs/build/sources#snapshotting-source-data-freshness) from running on source tables imported from packages. Unlike other resource types, sources do not yet support a `config` property. It is not possible to (re)define source configs hierarchically across multiple yaml files. @@ -80,7 +80,7 @@ Unlike other resource types, sources do not yet support a `config` property. It ### Examples #### Disable all sources imported from a package -To apply a configuration to all sources included from a [package](package-management), +To apply a configuration to all sources included from a [package](/docs/build/packages), state your configuration under the [project name](project-configs/name.md) in the `sources:` config as a part of the resource path. 
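For example, a minimal sketch in `dbt_project.yml` — here `events` stands in for the hypothetical project name of the installed package whose sources you want to disable:

```yaml
sources:
  events:
    +enabled: false
```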
@@ -120,7 +120,7 @@ sources: -You can configure specific source tables, and use [variables](dbt-jinja-functions/var) as the input to that configuration: +You can configure specific source tables, and use [variables](/reference/dbt-jinja-functions/var) as the input to that configuration: diff --git a/website/docs/reference/source-properties.md b/website/docs/reference/source-properties.md index 375eca12fc7..e4e9709ddc1 100644 --- a/website/docs/reference/source-properties.md +++ b/website/docs/reference/source-properties.md @@ -3,7 +3,7 @@ title: Source properties --- ## Related documentation -- [Using sources](using-sources) +- [Using sources](/docs/build/sources) - [Declaring resource properties](configs-and-properties) ## Overview diff --git a/website/docs/reference/test-configs.md b/website/docs/reference/test-configs.md index 885718c2ebb..88255e62189 100644 --- a/website/docs/reference/test-configs.md +++ b/website/docs/reference/test-configs.md @@ -3,7 +3,8 @@ title: Test configurations --- ## Related documentation -* [Tests](building-a-dbt-project/tests) + +* [Tests](/docs/build/tests) diff --git a/website/docs/reference/warehouse-profiles/athena-profile.md b/website/docs/reference/warehouse-profiles/athena-profile.md deleted file mode 100644 index 892c9d89a9f..00000000000 --- a/website/docs/reference/warehouse-profiles/athena-profile.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -title: "Athena Profile" ---- - -## Overview of dbt-athena - -**Maintained by:** Community -**Author:** Tomme -**Source:** [Github](https://github.com/Tomme/dbt-athena) -**dbt Cloud:** Not Supported -**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/C013MLFR7BQ) - -The easiest way to install is to use pip: - - pip install git+https://github.com/Tomme/dbt-athena.git - -## Connecting to Athena with dbt-athena - -This plugin does not accept any credentials directly. Instead, [credentials are determined automatically](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html) based on aws cli/boto3 conventions and stored login info. You can configure the AWS profile name to use via aws_profile_name. Checkout dbt profile configuration below for details. - - - -```yaml -default: - outputs: - dev: - type: athena - s3_staging_dir: [s3_staging_dir] - region_name: [region_name] - database: [database name] - schema: [dev_schema] - aws_profile_name: - [optional, profile to use from your AWS shared credentials file.] - - target: dev -``` - - diff --git a/website/docs/reference/warehouse-profiles/azuresynapse-profile.md b/website/docs/reference/warehouse-profiles/azuresynapse-profile.md deleted file mode 100644 index f6bb119298d..00000000000 --- a/website/docs/reference/warehouse-profiles/azuresynapse-profile.md +++ /dev/null @@ -1,178 +0,0 @@ ---- -title: "Microsoft Azure Synapse DW Profile" ---- - - -:::info Community plugin - -Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below. 
- -::: - -## Overview of dbt-synapse - -**Maintained by:** Community -**Author:** Nandan Hegde and Anders Swanson -**Source:** [Github](https://github.com/dbt-msft/dbt-synapse) -**Core version:** v0.18.0 and newer -**dbt Cloud:** Not Supported -**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/C01DRQ178LQ) - -![dbt-synapse stars](https://img.shields.io/github/stars/dbt-msft/dbt-synapse?style=for-the-badge) - -The package can be installed from PyPI with: - -```python -pip install dbt-synapse -``` -For further (and more likely up-to-date) info, see the [README](https://github.com/swanderz/dbt-synapse/blob/master/README.md) - -### Connecting to Azure Synapse with **`dbt-synapse`** - -First download and install the [MSFT ODBC Driver 17 for SQL Server](https://docs.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server?view=sql-server-ver15) - -#### standard SQL Server authentication -SQL Server credentials are supported for on-prem as well as cloud, and it is the default authentication method for `dbt-sqlsever` - - - -```yml -type: synapse -driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) -server: server-host-name or ip -port: 1433 -schema: schemaname -user: username -password: password -``` - - - -#### Active Directory Authentication - -The following [`pyodbc`-supported ActiveDirectory methods](https://docs.microsoft.com/en-us/sql/connect/odbc/using-azure-active-directory?view=sql-server-ver15#new-andor-modified-dsn-and-connection-string-keywords) are available to authenticate to Azure SQL products: -- ActiveDirectory Password -- Azure CLI -- ActiveDirectory Interactive (*Windows only*) -- ActiveDirectory Integrated (*Windows only*) -- Service Principal (a.k.a. AAD Application) -- ~~ActiveDirectory MSI~~ (not implemented) - - - - - -Definitely not ideal, but available - - - -```yml -type: synapse -driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) -server: server-host-name or ip -port: 1433 -schema: schemaname -authentication: ActiveDirectoryPassword -user: bill.gates@microsoft.com -password: iheartopensource -``` - - - - - - - -First, install the [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli), then, log in: - -`az login` - - - -```yml -type: synapse -driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) -server: server-host-name or ip -port: 1433 -schema: schemaname -authentication: CLI -``` -This is also the preferred route for using a service principal: - -`az login --service-principal --username $CLIENTID --password $SECRET --tenant $TENANTID` - - - - - - - -*Windows Only* brings up the Azure AD prompt so you can MFA if need be. 
- - - -```yml -type: synapse -driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) -server: server-host-name or ip -port: 1433 -schema: schemaname -authentication: ActiveDirectoryInteractive -user: bill.gates@microsoft.com -``` - - - - - - - -*Windows Only* uses your machine's credentials (might be disabled by your AAD admins) - - - -```yml -type: synapse -driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) -server: server-host-name or ip -port: 1433 -schema: schemaname -authentication: ActiveDirectoryIntegrated -``` - - - - - - - -`client_*` and `app_*` can be used interchangeably - - - -```yml -type: synapse -driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) -server: server-host-name or ip -port: 1433 -schema: schemaname -authentication: ServicePrincipal -tenant_id: tenant_id -client_id: clientid -client_secret: clientsecret -``` - - - - - - diff --git a/website/docs/reference/warehouse-profiles/clickhouse-profile.md b/website/docs/reference/warehouse-profiles/clickhouse-profile.md deleted file mode 100644 index 200837e595a..00000000000 --- a/website/docs/reference/warehouse-profiles/clickhouse-profile.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -title: "ClickHouse Profile" ---- - -:::info Community plugin - -Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below. - -::: - -## Overview of dbt-clickhouse -**Maintained by:** Community -**Author:** Dmitriy Sokolov -**Source:** https://github.com/silentsokolov/dbt-clickhouse -**Core version:** v0.19.0 and newer -**dbt Cloud:** Not Supported - -![dbt-clickhouse stars](https://img.shields.io/github/stars/silentsokolov/dbt-clickhouse?style=for-the-badge) - -The easiest way to install it is to use pip: - - pip install dbt-clickhouse - -## Connecting to ClickHouse with **dbt-clickhouse** - -#### User / password authentication - -Configure your dbt profile for using ClickHouse: - -##### ClickHouse connection information - - -```yaml -dbt-clickhouse: - target: dev - outputs: - dev: - type: clickhouse - schema: [database name] - host: [db.clickhouse.com] - port: 9000 - user: [user] - password: [abc123] -``` - - diff --git a/website/docs/reference/warehouse-profiles/databricks-profile.md b/website/docs/reference/warehouse-profiles/databricks-profile.md deleted file mode 100644 index 6f6fe41e70a..00000000000 --- a/website/docs/reference/warehouse-profiles/databricks-profile.md +++ /dev/null @@ -1,68 +0,0 @@ ---- -title: "Databricks Profile" -id: "databricks-profile" ---- - -## Overview of dbt-databricks - -**Maintained by:** some dbt loving Bricksters -**Author:** Databricks -**Source:** [Github](https://github.com/databricks/dbt-databricks) -**dbt Cloud:** Coming Soon -**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/CNGCW8HKL) - -![dbt-databricks stars](https://img.shields.io/github/stars/databricks/dbt-databricks?style=for-the-badge) - -## Installation and Distribution - -The easiest way to install dbt-databricks is to use `pip`: - -```zsh -pip install dbt-databricks -``` - -### Set up a Databricks Target - -dbt-databricks can connect to Databricks all-purpose clusters as well as SQL endpoints. -The latter provides an opinionated way of running SQL workloads with optimal performance and -price, the former provides all the flexibility of Spark. 
- - - -```yaml -your_profile_name: - target: dev - outputs: - dev: - type: databricks - schema: [schema name] - host: [yourorg.databrickshost.com] - http_path: [/sql/your/http/path] - token: [dapiXXXXXXXXXXXXXXXXXXXXXXX] # Personal Access Token (PAT) -``` - - - -See the [Databricks documentation](https://docs.databricks.com/dev-tools/dbt.html#) on how -to obtain the credentials for configuring your profile. - -## Caveats - -### Supported Functionality - -Most dbt Core functionality is supported, but some features are only available -on Delta Lake. - -Delta-only features: -1. Incremental model updates by `unique_key` instead of `partition_by` (see [`merge` strategy](spark-configs#the-merge-strategy)) -2. [Snapshots](https://docs.getdbt.com/docs/building-a-dbt-project/snapshots) - -### Choosing between dbt-databricks and dbt-spark - -While `dbt-spark` can be used to connect to Databricks, `dbt-databricks` was created to make it -even easier to use dbt with the Databricks Lakehouse. - -`dbt-databricks` includes: -- No need to install additional drivers or dependencies for use on the CLI -- Use of Delta Lake for all models out of the box -- SQL macros that are optimzed to run with [Photon](https://docs.databricks.com/runtime/photon.html) diff --git a/website/docs/reference/warehouse-profiles/dremio-profile.md b/website/docs/reference/warehouse-profiles/dremio-profile.md deleted file mode 100644 index d156f815117..00000000000 --- a/website/docs/reference/warehouse-profiles/dremio-profile.md +++ /dev/null @@ -1,48 +0,0 @@ ---- -title: "Dremio Profile" ---- - -:::info Community plugin - -Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below. - -::: - -## Overview of dbt-dremio -**Maintained by:** Community -**Author:** Fabrice Etanchaud (Maif-vie) -**Source:** https://github.com/fabrice-etanchaud/dbt-dremio -**Core version:** v0.18.0 and newer -**dbt Cloud:** Not Supported - -![dbt-dremio stars](https://img.shields.io/github/stars/fabrice-etanchaud/dbt-dremio?style=for-the-badge) - -The easiest way to install it is to use pip: - - pip install dbt-dremio - -Follow the repository's link for os dependencies. - -## Connecting to Dremio with **dbt-dremio** - -### Connecting with ZooKeeper - -I have no means to test [connection with ZooKeeper](https://docs.dremio.com/drivers/dremio-connector.html#connecting-to-zookeeper). -If you do need this, contact me and I will provide you with a branch you can test. - -### Direct connection to a coordinator - -```yaml -my_profile: - outputs: - my_target: - type: dremio - threads: 2 -# please replace driver below with the one you gave to your dremio odbc driver installation - driver: Dremio ODBC Driver 64-bit - host: [coordinator host] - port: 31010 - schema: [schema] - user: [user] - password: [password] - target: my_target diff --git a/website/docs/reference/warehouse-profiles/exasol-profile.md b/website/docs/reference/warehouse-profiles/exasol-profile.md deleted file mode 100644 index 5155ece4007..00000000000 --- a/website/docs/reference/warehouse-profiles/exasol-profile.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -title: "Exasol Profile" ---- - -:::info Community plugin - -Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below. 
- -::: - -## Overview of dbt-exasol -**Maintained by:** Community -**Author:** Torsten Glunde, Ilija Kutle -**Source:** https://github.com/tglunde/dbt-exasol -**Core version:** v0.14.0 and newer -**dbt Cloud:** Not Supported - -![dbt-exasol stars](https://img.shields.io/github/stars/tglunde/dbt-exasol?style=for-the-badge) - -Easiest install is to use pip: - - pip install dbt-exasol - -### Connecting to Exasol with **dbt-exasol** - -#### User / password authentication - -Configure your dbt profile for using Exasol: - -##### Exasol connection information - - -```yaml -dbt-exasol: - target: dev - outputs: - dev: - type: exasol - threads: 1 - dsn: HOST:PORT - user: USERNAME - password: PASSWORD - dbname: db - schema: SCHEMA -``` - - diff --git a/website/docs/reference/warehouse-profiles/materialize-profile.md b/website/docs/reference/warehouse-profiles/materialize-profile.md deleted file mode 100644 index 39e52a568d9..00000000000 --- a/website/docs/reference/warehouse-profiles/materialize-profile.md +++ /dev/null @@ -1,92 +0,0 @@ ---- -title: "Materialize Profile" ---- - -:::info Vendor-supported plugin - -Certain core functionality may vary. If you would like to report a bug, request a feature, or contribute, you can check out the linked repository and open an issue. - -::: - -## Overview of dbt-materialize - -**Maintained by:** Materialize, Inc. -**Source:** [Github](https://github.com/MaterializeInc/materialize/blob/main/misc/dbt-materialize) -**Core version:** v0.18.1 and newer -**dbt Cloud:** Not Supported -**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/C01PWAH41A5) - -## Installation and distribution - -The `dbt-materialize` adapter is managed in the core [Materialize repository](https://github.com/MaterializeInc/materialize/blob/main/misc/dbt-materialize). To get started, install `dbt-materialize` using `pip` (and optionally a virtual environment): - -``` -python3 -m venv dbt-venv # create the virtual environment -source dbt-venv/bin/activate # activate the virtual environment -pip install dbt-materialize # install the adapter -``` - -To check that the adapter was successfully installed, run: - -``` -dbt --version -``` - -You should see `materialize` listed under “Plugins”. If this is not the case, double-check that the virtual environment is activated! - -## Connecting to Materialize - -Once you have Materialize [installed and running](https://materialize.com/docs/install/), adapt your `profiles.yml` to connect to your instance using the following reference profile configuration: - - - -```yaml -dbt-materialize: - target: dev - outputs: - dev: - type: materialize - threads: 1 - host: [host] - port: [port] - user: [user] - pass: [password] - dbname: [database] - schema: [name of your dbt schema] -``` - - - -To test the connection to Materialize, run: - -``` -dbt debug -``` - -If the output reads "All checks passed!", you’re good to go! Check the [dbt and Materialize guide](https://materialize.com/docs/guides/dbt/) to learn more and get started. - -## Supported Features - -### Materializations - -Because Materialize is optimized for transformations on streaming data and the core of dbt is built around batch, the `dbt-materialize` adapter implements a few custom materialization types: - -Type | Supported? | Details ------|------------|---------------- -`source` | YES | Creates a [source](https://materialize.com/docs/sql/create-source/). -`view` | YES | Creates a [view](https://materialize.com/docs/sql/create-view/#main). 
-`materializedview` | YES | Creates a [materialized view](https://materialize.com/docs/sql/create-materialized-view/#main). -`table` | YES | Creates a [materialized view](https://materialize.com/docs/sql/create-materialized-view/#main). (Actual table support pending [#5266](https://github.com/MaterializeInc/materialize/issues/5266)) -`index` | YES | (Deprecated) Creates an index. Use the [`indexes` config](materialize-configs#indexes) to create indexes on `materializedview`, `view` or `source` relations instead. -`sink` | YES | Creates a [sink](https://materialize.com/docs/sql/create-sink/#main). -`ephemeral` | YES | Executes queries using CTEs. -`incremental` | NO | Use the `materializedview` instead. Materialized views will always return up-to-date results without manual or configured refreshes. For more information, check out [Materialize documentation](https://materialize.com/docs/). - -### Seeds - -Running [`dbt seed`](commands/seed) will create a static materialized from a CSV file. You will not be able to add to or update this view after it has been created. If you want to rerun `dbt seed`, you must first drop existing views manually with `drop view`. - -## Resources - -- [dbt and Materialize guide](https://materialize.com/docs/guides/dbt/) -- [Get started](https://github.com/MaterializeInc/demos/tree/main/dbt-get-started) using dbt and Materialize together diff --git a/website/docs/reference/warehouse-profiles/mssql-profile.md b/website/docs/reference/warehouse-profiles/mssql-profile.md deleted file mode 100644 index 582f7a85b9d..00000000000 --- a/website/docs/reference/warehouse-profiles/mssql-profile.md +++ /dev/null @@ -1,202 +0,0 @@ ---- -title: "Microsoft SQL Server Profile" ---- - -:::info Community plugin - -Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below. - -::: - -## Overview of dbt-sqlserver - -**Maintained by:** Community -**Author:** Mikael Ene -**Source:** [Github](https://github.com/dbt-msft/dbt-sqlserver) -**Core version:** v0.14.0 and newer -**dbt Cloud:** Not Supported -**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/CMRMDDQ9W) - -![dbt-sqlserver stars](https://img.shields.io/github/stars/mikaelene/dbt-sqlserver?style=for-the-badge) - -The package can be installed from PyPI with: - -```python -pip install dbt-sqlserver -``` -On Ubuntu make sure you have the ODBC header files before installing - - sudo apt install unixodbc-dev - -### Connecting to SQL Server with **dbt-sqlserver** - -#### standard SQL Server authentication -SQL Server credentials are supported for on-prem as well as cloud, and it is the default authentication method for `dbt-sqlsever` - - - -```yml -your_profile_name: - target: dev - outputs: - dev: - type: sqlserver - driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) - server: server-host-name or ip - port: 1433 - schema: schemaname - user: username - password: password -``` - - - -#### Active Directory Authentication - -The following [`pyodbc`-supported ActiveDirectory methods](https://docs.microsoft.com/en-us/sql/connect/odbc/using-azure-active-directory?view=sql-server-ver15#new-andor-modified-dsn-and-connection-string-keywords) are available to authenticate to Azure SQL products: -- ActiveDirectory Password -- Azure CLI -- ActiveDirectory Interactive (*Windows only*) -- ActiveDirectory Integrated (*Windows only*) -- Service Principal (a.k.a. 
AAD Application) -- ~~ActiveDirectory MSI~~ (not implemented) - - - - - -Definitely not ideal, but available - - - -```yml -your_profile_name: - target: dev - outputs: - dev: - type: sqlserver - driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) - server: server-host-name or ip - port: 1433 - schema: schemaname - authentication: ActiveDirectoryPassword - user: bill.gates@microsoft.com - password: iheartopensource -``` - - - - - - - -First, install the [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli), then, log in: - -`az login` - - - -```yml -your_profile_name: - target: dev - outputs: - dev: - type: sqlserver - driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) - server: server-host-name or ip - port: 1433 - schema: schemaname - authentication: CLI -``` -This is also the preferred route for using a service principal: - -`az login --service-principal --username $CLIENTID --password $SECRET --tenant $TENANTID` - - - - - - - -*Windows Only* brings up the Azure AD prompt so you can MFA if need be. - - - -```yml -your_profile_name: - target: dev - outputs: - dev: - type: sqlserver - driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) - server: server-host-name or ip - port: 1433 - schema: schemaname - authentication: ActiveDirectoryInteractive - user: bill.gates@microsoft.com -``` - - - - - - - -*Windows Only* uses your machine's credentials (might be disabled by your AAD admins) - - - -```yml -your_profile_name: - target: dev - outputs: - dev: - type: sqlserver - driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) - server: server-host-name or ip - port: 1433 - schema: schemaname - authentication: ActiveDirectoryIntegrated -``` - - - - - - - -`client_*` and `app_*` can be used interchangeably - - - -```yml -your_profile_name: - target: dev - outputs: - dev: - type: sqlserver - driver: 'ODBC Driver 17 for SQL Server' (The ODBC Driver installed on your system) - server: server-host-name or ip - port: 1433 - schema: schemaname - authentication: ServicePrincipal - tenant_id: tenant_id - client_id: clientid - client_secret: clientsecret -``` - - - - - - - diff --git a/website/docs/reference/warehouse-profiles/oracle-profile.md b/website/docs/reference/warehouse-profiles/oracle-profile.md deleted file mode 100644 index cec919775fd..00000000000 --- a/website/docs/reference/warehouse-profiles/oracle-profile.md +++ /dev/null @@ -1,201 +0,0 @@ ---- -title: "Oracle Profile" ---- - -## Overview of dbt-oracle - -**Maintained by:** Oracle -**Source:** [Github](https://github.com/oracle/dbt-oracle) -**Core version:** v1.0.6 -**dbt Cloud:** Not Supported -**dbt Slack channel** [#db-oracle](https://getdbt.slack.com/archives/C01PWH4TXLY) - -![dbt-oracle stars](https://img.shields.io/github/stars/oracle/dbt-oracle?style=for-the-badge) - -## Installation - -dbt-oracle can be installed via the Python Package Index (PyPI) using pip - - pip install dbt-oracle - -You will need Oracle client driver installed. Check this [link](https://cx-oracle.readthedocs.io/en/latest/user_guide/installation.html) for the installation guide for your operating system - -## Connecting to Oracle Database - -Define the following mandatory parameters as environment variables and refer them in the connection profile using [env_var](https://docs.getdbt.com/reference/dbt-jinja-functions/env_var) jinja function. 
Optionally, you can also define these directly in the `profiles.yml` file, but this is not recommended - -```bash -export DBT_ORACLE_USER= -export DBT_ORACLE_PASSWORD=*** -export DBT_ORACLE_DATABASE=ga01d78d2ecd5f1_db202112221108 -export DBT_ORACLE_SCHEMA= -``` - - -An Oracle connection profile for dbt can be set using any one of the following methods - - - - - -To connect using the database hostname or IP address, you need to specify the following -- host -- port (1521 or 1522) -- protocol (tcp or tcps) -- service - -```bash -export DBT_ORACLE_HOST=adb.us-ashburn-1.oraclecloud.com -export DBT_ORACLE_SERVICE=ga01d78d2ecd5f1_db202112221108_high.adb.oraclecloud.com -``` - - - -```yaml -dbt_test: - target: "{{ env_var('DBT_TARGET', 'dev') }}" - outputs: - dev: - type: oracle - user: "{{ env_var('DBT_ORACLE_USER') }}" - pass: "{{ env_var('DBT_ORACLE_PASSWORD') }}" - protocol: "tcps" - host: "{{ env_var('DBT_ORACLE_HOST') }}" - port: 1522 - service: "{{ env_var('DBT_ORACLE_SERVICE') }}" - database: "{{ env_var('DBT_ORACLE_DATABASE') }}" - schema: "{{ env_var('DBT_ORACLE_SCHEMA') }}" - threads: 4 -``` - - - - - -The `tnsnames.ora` file is a configuration file that contains network service names mapped to connect descriptors. -The directory location of `tnsnames.ora` file can be specified using `TNS_ADMIN` environment variable - - - -```text -net_service_name= - (DESCRIPTION= - (ADDRESS=(PROTOCOL=TCP)(HOST=dbhost.example.com)(PORT=1521)) - (CONNECT_DATA=(SERVICE_NAME=orclpdb1))) -``` - - - -The `net_service_name` can be defined as environment variable and referred in `profiles.yml` - -```bash -export DBT_ORACLE_TNS_NAME=net_service_name -``` - - - -```yaml -dbt_test: - target: dev - outputs: - dev: - type: oracle - user: "{{ env_var('DBT_ORACLE_USER') }}" - pass: "{{ env_var('DBT_ORACLE_PASSWORD') }}" - database: "{{ env_var('DBT_ORACLE_DATABASE') }}" - tns_name: "{{ env_var('DBT_ORACLE_TNS_NAME') }}" - schema: "{{ env_var('DBT_ORACLE_SCHEMA') }}" - threads: 4 -``` - - - - - -The connection string identifies which database service to connect to. It can be one of the following - -- An [Oracle Easy Connect String](https://docs.oracle.com/en/database/oracle/oracle-database/21/netag/configuring-naming-methods.html#GUID-B0437826-43C1-49EC-A94D-B650B6A4A6EE) -- An Oracle Net Connect Descriptor String -- A Net Service Name mapping to a connect descriptor - -```bash -export DBT_ORACLE_CONNECT_STRING="(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=dbhost.example.com)(PORT=1521))(CONNECT_DATA=(SERVICE_NAME=orclpdb1)))" -``` - - - -```yaml -dbt_test: - target: "{{ env_var('DBT_TARGET', 'dev') }}" - outputs: - dev: - type: oracle - user: "{{ env_var('DBT_ORACLE_USER') }}" - pass: "{{ env_var('DBT_ORACLE_PASSWORD') }}" - database: "{{ env_var('DBT_ORACLE_DATABASE') }}" - schema: "{{ env_var('DBT_ORACLE_SCHEMA') }}" - connection_string: "{{ env_var('DBT_ORACLE_CONNECT_STRING') }}" -``` - - - - - - -## Connecting to Oracle Autonomous Database in Cloud - -To enable connection to Oracle Autonomous Database in Oracle Cloud, a wallet needs be downloaded from the cloud, and cx_Oracle needs to be configured to use it. The wallet gives mutual TLS which provides enhanced security for authentication and encryption. A database username and password is still required for your application connections. - -### Install the Wallet and Network Configuration Files - -From the Oracle Cloud console for the database, download the wallet zip file. It contains the wallet and network configuration files. 
Note: keep wallet files in a secure location and share them only with authorized users. - -Unzip the wallet zip file. For cx_Oracle, only these files from the zip are needed: - -- `tnsnames.ora` - Maps net service names used for application connection strings to your database services -- `sqlnet.ora` - Configures Oracle Network settings -- `cwallet.sso` - Enables SSL/TLS connections - -After downloading the wallet, put the unzipped wallet files in a secure directory and set the TNS_ADMIN environment variable to that directory name. Next, edit the sqlnet.ora file to point to the wallet directory. - - - -```text -WALLET_LOCATION = (SOURCE = (METHOD = file) (METHOD_DATA = (DIRECTORY="/path/to/wallet/directory"))) -SSL_SERVER_DN_MATCH=yes -``` - - - -:::info TLS v/s mTLS - -If you have enabled TLS connections on your Database instance then dbt can connect using only database username, password and the Oracle Net connect name given in the unzipped tnsnames.ora file. - -::: - - -## Supported Features - -- Table materialization -- View materialization -- Incremental materialization -- Seeds -- Data sources -- Singular tests -- Generic tests; Not null, Unique, Accepted values and Relationships -- Operations -- Analyses -- Exposures -- Document generation -- Serve project documentation as a website -- All dbt commands are supported - -## Not Supported features -- Ephemeral materialization - diff --git a/website/docs/reference/warehouse-profiles/postgres-profile.md b/website/docs/reference/warehouse-profiles/postgres-profile.md deleted file mode 100644 index f4e3c30c5a3..00000000000 --- a/website/docs/reference/warehouse-profiles/postgres-profile.md +++ /dev/null @@ -1,65 +0,0 @@ ---- -title: "Postgres Profile" ---- - -## Overview of dbt-postgres -**Maintained by:** core dbt maintainers -**Author:** dbt Labs -**dbt Cloud:** Supported -**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/C0172G2E273) - -## Profile Configuration - -Postgres targets should be set up using the following configuration in your `profiles.yml` file. - - - -```yaml -company-name: - target: dev - outputs: - dev: - type: postgres - host: [hostname] - user: [username] - password: [password] - port: [port] - dbname: [database name] - schema: [dbt schema] - threads: [1 or more] - keepalives_idle: 0 # default 0, indicating the system default. See below - connect_timeout: 10 # default 10 seconds - search_path: [optional, override the default postgres search_path] - role: [optional, set the role dbt assumes when executing queries] - sslmode: [optional, set the sslmode used to connect to the database] - -``` - - - -### Configurations - -#### search_path - -The `search_path` config controls the Postgres "search path" that dbt configures when opening new connections to the database. By default, the Postgres search path is `"$user, public"`, meaning that unqualified names will be searched for in the `public` schema, or a schema with the same name as the logged-in user. **Note:** Setting the `search_path` to a custom value is not necessary or recommended for typical usage of dbt. - -#### role - - Added in v0.16.0 - -The `role` config controls the Postgres role that dbt assumes when opening new connections to the database. - -#### sslmode - - Added in v0.16.0 - -The `sslmode` config controls how dbt connectes to Postgres databases using SSL. See [the Postgres docs](https://www.postgresql.org/docs/9.1/libpq-ssl.html) on `sslmode` for usage information. 
When unset, dbt will connect to databases using the Postgres default, `prefer`, as the `sslmode`. - -### Postgres notes -### Performance -While Postgres works reasonably well for datasets smaller than about 10mm rows, database tuning is sometimes required. Make sure to create indexes for columns that are commonly used in joins or where clauses. - -### `keepalives_idle` -If the database closes its connection while dbt is waiting for data, you may see the error `SSL SYSCALL error: EOF detected`. Lowering the [`keepalives_idle` value](https://www.postgresql.org/docs/9.3/libpq-connect.html) may prevent this, because the server will send a ping to keep the connection active more frequently. - -[dbt's default setting](https://github.com/dbt-labs/dbt-core/blob/main/plugins/postgres/dbt/adapters/postgres/connections.py#L28) is 0 (the server's default value), but can be configured lower (perhaps 120 or 60 seconds), at the cost of a chattier network connection. \ No newline at end of file diff --git a/website/docs/reference/warehouse-profiles/rockset-profile.md b/website/docs/reference/warehouse-profiles/rockset-profile.md deleted file mode 100644 index 4f5431c7f15..00000000000 --- a/website/docs/reference/warehouse-profiles/rockset-profile.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -title: "Rockset Profile" ---- - -:::info Vendor-supported plugin - -Certain core functionality may vary. If you would like to report a bug, request a feature, or contribute, you can check out the linked repository and open an issue. - -::: - -## Overview of dbt-rockset - -**Maintained by:** Rockset, Inc. -**Source:** [Github](https://github.com/rockset/dbt-rockset) -**Core version:** v0.19.2 and newer -**dbt Cloud:** Not Supported -**dbt Slack channel:** [Slack](https://getdbt.slack.com/archives/C02J7AZUAMN) - -The easiest way to install is to use pip: - - pip install dbt-rockset - -## Connecting to Rockset with **dbt-rockset** - -The dbt profile for Rockset is very simple and contains the following fields: - - - -```yaml -rockset: - target: dev - outputs: - dev: - type: rockset - workspace: [schema] - api_key: [api_key] - api_server: [api_server] # (Default is api.rs2.usw2.rockset.com) -``` - - - -### Materializations - -Type | Supported? | Details ------|------------|---------------- -view | YES | Creates a [view](https://rockset.com/docs/views/#gatsby-focus-wrapper). -table | YES | Creates a [collection](https://rockset.com/docs/collections/#gatsby-focus-wrapper). -ephemeral | YES | Executes queries using CTEs. -incremental | YES | Creates a [collection](https://rockset.com/docs/collections/#gatsby-focus-wrapper) if it doesn't exist, and then writes results to it. - -## Caveats -1. `unique_key` is not supported with incremental, unless it is set to [_id](https://rockset.com/docs/special-fields/#the-_id-field), which acts as a natural `unique_key` in Rockset anyway. -2. The `table` is slower in Rockset than most due to Rockset's architecture as a low-latency, real-time database. Creating new collections requires provisioning hot storage to index and serve fresh data, which takes about a minute. -3. Rockset queries have a two-minute timeout. Any model which runs a query that takes longer to execute than two minutes will fail. 
diff --git a/website/docs/reference/warehouse-profiles/tidb-profile.md b/website/docs/reference/warehouse-profiles/tidb-profile.md deleted file mode 100644 index da47e7ef16a..00000000000 --- a/website/docs/reference/warehouse-profiles/tidb-profile.md +++ /dev/null @@ -1,98 +0,0 @@ ---- -title: "TiDB Profile" ---- - -:::info Vendor-supported plugin - -Some [core functionality](https://github.com/pingcap/dbt-tidb/blob/main/README.md#supported-features) may be limited. -If you're interested in contributing, check out the source code repository listed below. - -::: - -## Overview of dbt-tidb - -**Maintained by:** PingCAP -**Author:** Xiang Zhang and Qiang Wu -**Source:** https://github.com/pingcap/dbt-tidb -**Core version:** v1.0.0 and newer -**dbt Cloud:** Not Supported - -The easiest way to install it is to use pip: - -``` -pip install dbt-tidb -``` - -### Connecting to TiDB with **dbt-tidb** - -#### User / Password Authentication - -Configure your dbt profile for using TiDB: - -##### TiDB connection profile - - -```yaml -dbt-tidb: - target: dev - outputs: - dev: - type: tidb - server: 127.0.0.1 - port: 4000 - schema: database_name - username: tidb_username - password: tidb_password -``` - - - -##### Description of Profile Fields - -| Option | Description | Required? | Example | -| --------------- | ------------------------------------------------------ |-----------|--------------------| -| type | The specific adapter to use | Required | `tidb` | -| server | The server (hostname) to connect to | Required | `yourorg.tidb.com` | -| port | The port to use | Required | `4000` | -| schema | Specify the schema (database) to build models into | Required | `analytics` | -| username | The username to use to connect to the server | Required | `dbt_admin` | -| password | The password to use for authenticating to the server | Required | `awesome_password` | - -#### Database User Privileges - -Your database user would be able to have some abilities to read or write, such as `SELECT`, `CREATE`, and so on. -You can find some help [here](https://docs.pingcap.com/tidb/v4.0/privilege-management) with TiDB privileges management. - -| Required Privilege | -|------------------------| -| SELECT | -| CREATE | -| CREATE TEMPORARY TABLE | -| CREATE VIEW | -| INSERT | -| DROP | -| SHOW DATABASE | -| SHOW VIEW | -| SUPER | - -### Supported features - -| TiDB 4.X | TiDB 5.0 ~ 5.2 | TiDB >= 5.3 | Feature | -|:--------------:|:--------------:|:----------------:|:------------------------------:| -| ✅ | ✅ | ✅ | Table materialization | -| ✅ | ✅ | ✅ | View materialization | -| ✅ | ✅ | ✅ | Incremental materialization | -| ❌ | ❌ | ✅ | Ephemeral materialization | -| ✅ | ✅ | ✅ | Seeds | -| ✅ | ✅ | ✅ | Sources | -| ✅ | ✅ | ✅ | Custom data tests | -| ✅ | ✅ | ✅ | Docs generate | -| ❌ | ❌ | ✅ | Snapshots | - -**Note:** - -* TiDB 4.0 ~ 5.0 does not support [CTE](https://docs.pingcap.com/tidb/dev/sql-statement-with), - you should avoid using `WITH` in your SQL code. -* TiDB 4.0 ~ 5.2 does not support creating a [temporary table or view](https://docs.pingcap.com/tidb/v5.2/sql-statement-create-table#:~:text=sec\)-,MySQL%20compatibility,-TiDB%20does%20not). -* TiDB 4.X does not support using SQL func in `CREATE VIEW`, avoid it in your SQL code. - You can find more detail [here](https://github.com/pingcap/tidb/pull/27252). 
diff --git a/website/docs/reference/warehouse-profiles/trino-profile.md b/website/docs/reference/warehouse-profiles/trino-profile.md deleted file mode 100644 index 03deafe599b..00000000000 --- a/website/docs/reference/warehouse-profiles/trino-profile.md +++ /dev/null @@ -1,74 +0,0 @@ ---- -title: "Starburst & Trino Profile" ---- - -:::info Vendor-supported plugin - -Certain core functionality may vary. If you would like to report a bug, request a feature, or contribute, you can check out the linked repository and open an issue. - -::: - -## Overview of dbt-trino - -**Maintained by:** Starburst Data, Inc. -**Source:** [Github](https://github.com/starburstdata/dbt-trino) -**Core version:** v0.20.0 and newer -**dbt Cloud:** Not Supported -**dbt Slack channel:** [Slack](https://getdbt.slack.com/archives/CNNPBQ24R) - -![dbt-presto stars](https://img.shields.io/github/stars/starburstdata/dbt-trino?style=for-the-badge) - -## Installation and Distribution - -dbt's Trino adapter is managed in its own repository, [dbt-trino](https://github.com/starburstdata/dbt-trino). To use the Trino adapter, you must install the `dbt-trino` plugin: - -### Using pip -The following command will install the latest version of `dbt-trino` as well as the requisite version of `dbt-core`: - -``` -pip install dbt-trino -``` - - -## Set up a Trino Target - -Trino targets should be set up using the following configuration in your `profiles.yml` file. - - - -```yaml -trino: - target: dev - outputs: - dev: - type: trino - method: none # optional, one of {none | ldap | kerberos} - user: [user] - password: [password] # required if method is ldap or kerberos - database: [database name] - host: [hostname] - port: [port number] - schema: [your dbt schema] - threads: [1 or more] - http_scheme: [http or https] - session_properties: - query_max_run_time: 5d - exchange_compression: True - - -``` - - - -## Incremental models - -The incremental strategy supported by the adapter is to append new records without updating/overwriting any existing data from the target model. - -## Caveats - -### Unsupported Functionality - -Due to the nature of Trino, not all core dbt functionality is supported. The following features of dbt are not implemented on Trino: - -1. [Snapshots](snapshots) - diff --git a/website/docs/reference/warehouse-profiles/vertica-profile.md b/website/docs/reference/warehouse-profiles/vertica-profile.md deleted file mode 100644 index 5ce1b479976..00000000000 --- a/website/docs/reference/warehouse-profiles/vertica-profile.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -title: "Vertica Profile" ---- - -:::info Community plugin - -Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below. - -::: - -## Overview of dbt-vertica -**Maintained by:** Community -**Authors:** Matthew Carter, Andy Regan, Andrew Hedengren -**Source:** [Github](https://github.com/mpcarter/dbt-vertica) -**Core version:** v0.21.0 and newer -**dbt Cloud:** Not Supported - -![dbt-vertica stars](https://img.shields.io/github/stars/mpcarter/dbt-vertica) - -Easiest install is to use pip: - - pip install dbt-vertica - -You don't need to install dbt separately. Installing `dbt-vertica` will also install `dbt-core` and `vertica-python`. 
- -### Connecting to Vertica with **dbt-vertica** - -#### Username / password authentication - -Configure your dbt profile for using Vertica: - -##### Vertica connection information - - -```yaml -your-profile: - outputs: - dev: - type: vertica # Don't change this! - host: vertica-host-name - port: 5433 # or your custom port (optional) - username: your-username - password: your-password - database: vertica-database-name - schema: your-default-schema - target: dev -``` - - - -By default, `dbt-vertica` will request `ConnectionLoadBalance=true` (which is generally a good thing), and set a session label of `dbt_your-username`. - -There are three options for SSL: `ssl`, `ssl_env_cafile`, and `ssl_uri`. -See their use in the code [here](https://github.com/mpcarter/dbt-vertica/blob/d15f925049dabd2833b4d88304edd216e3f654ed/dbt/adapters/vertica/connections.py#L72-L87). diff --git a/website/docs/reference/warehouse-setups/alloydb-setup.md b/website/docs/reference/warehouse-setups/alloydb-setup.md new file mode 100644 index 00000000000..b65b0759eed --- /dev/null +++ b/website/docs/reference/warehouse-setups/alloydb-setup.md @@ -0,0 +1,32 @@ +--- +title: "AlloyDB setup" +meta: + maintained_by: Community? + authors: 'dbt-labs' + github_repo: 'dbt-labs/dbt-postgres' + pypi_package: 'dbt-postgres' + min_core_version: 'v1.0.0' + cloud_support: Not Supported + min_supported_version: '?' + slack_channel_name: '#db-postgres' + slack_channel_link: 'https://getdbt.slack.com/archives/C0172G2E273' + platform_name: 'AlloyDB' + config_page: 'postgres-configs' +--- + +## Overview of AlloyDB support + +
      +
+<ul>
+    <li>Maintained by: {frontMatter.meta.maintained_by}</li>
+    <li>Authors: {frontMatter.meta.authors}</li>
+    <li>GitHub repo: {frontMatter.meta.github_repo}</li>
+    <li>PyPI package: {frontMatter.meta.pypi_package}</li>
+    <li>Slack channel: {frontMatter.meta.slack_channel_name}</li>
+    <li>Supported dbt Core version: {frontMatter.meta.min_core_version} and newer</li>
+    <li>dbt Cloud support: {frontMatter.meta.cloud_support}</li>
+    <li>Minimum data platform version: {frontMatter.meta.min_supported_version}</li>
+</ul>
    + +## Profile Configuration + +AlloyDB targets are configured exactly the same as [Postgres targets](postgres-setup#profile-configuration). diff --git a/website/docs/reference/warehouse-setups/athena-setup.md b/website/docs/reference/warehouse-setups/athena-setup.md new file mode 100644 index 00000000000..e95b89d69f4 --- /dev/null +++ b/website/docs/reference/warehouse-setups/athena-setup.md @@ -0,0 +1,65 @@ +--- +title: "Athena setup" +meta: + maintained_by: Community + authors: 'Tomme' + github_repo: 'Tomme/dbt-athena' + pypi_package: 'dbt-athena-adapter' + min_core_version: 'v1.0.1' + cloud_support: Not Supported + min_supported_version: 'engine version 2' + slack_channel_name: '#db-athena' + slack_channel_link: 'https://getdbt.slack.com/archives/C013MLFR7BQ' + platform_name: 'Athena' + config_page: 'no-configs' +--- + +

+<h2> Overview of {frontMatter.meta.pypi_package} </h2>
+
+<ul>
+    <li>Maintained by: {frontMatter.meta.maintained_by}</li>
+    <li>Authors: {frontMatter.meta.authors}</li>
+    <li>GitHub repo: {frontMatter.meta.github_repo}</li>
+    <li>PyPI package: {frontMatter.meta.pypi_package}</li>
+    <li>Slack channel: {frontMatter.meta.slack_channel_name}</li>
+    <li>Supported dbt Core version: {frontMatter.meta.min_core_version} and newer</li>
+    <li>dbt Cloud support: {frontMatter.meta.cloud_support}</li>
+    <li>Minimum data platform version: {frontMatter.meta.min_supported_version}</li>
+</ul>
+
+<h2> Installing {frontMatter.meta.pypi_package} </h2>
+
+pip is the easiest way to install the adapter:
+
+<code>pip install {frontMatter.meta.pypi_package}</code>
+
+Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.
+
+<h2> Configuring {frontMatter.meta.pypi_package} </h2>
+
+For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.
+
+For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
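Because these pages pull their values from the `meta` frontmatter at render time, the templated install instruction above resolves per adapter; for this page (`pypi_package: 'dbt-athena-adapter'`), it would render as:

```bash
pip install dbt-athena-adapter
```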
    + +## Connecting to Athena with dbt-athena + +This plugin does not accept any credentials directly. Instead, [credentials are determined automatically](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html) based on AWS CLI/boto3 conventions and stored login info. You can configure the AWS profile name to use via aws_profile_name. Check out the dbt profile configuration below for details. + + + +```yaml +default: + outputs: + dev: + type: athena + s3_staging_dir: [s3_staging_dir] + region_name: [region_name] + database: [database name] + schema: [dev_schema] + aws_profile_name: + [optional, profile to use from your AWS shared credentials file.] + + target: dev +``` + + diff --git a/website/docs/reference/warehouse-setups/azuresynapse-setup.md b/website/docs/reference/warehouse-setups/azuresynapse-setup.md new file mode 100644 index 00000000000..72630c844da --- /dev/null +++ b/website/docs/reference/warehouse-setups/azuresynapse-setup.md @@ -0,0 +1,101 @@ +--- +title: "Microsoft Azure Synapse DWH setup" +meta: + maintained_by: Community + authors: 'dbt-msft community (https://github.com/dbt-msft)' + github_repo: 'dbt-msft/dbt-synapse' + pypi_package: 'dbt-synapse' + min_core_version: 'v0.18.0' + cloud_support: Not Supported + min_supported_version: 'Azure Synapse 10' + slack_channel_name: '#db-synapse' + slack_channel_link: 'https://getdbt.slack.com/archives/C01DRQ178LQ' + platform_name: 'Synapse' + config_page: 'no-configs' +--- + +:::info Community plugin + +Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below. + +::: + +

+<h2> Overview of {frontMatter.meta.pypi_package} </h2>
+
+<ul>
+    <li>Maintained by: {frontMatter.meta.maintained_by}</li>
+    <li>Authors: {frontMatter.meta.authors}</li>
+    <li>GitHub repo: {frontMatter.meta.github_repo}</li>
+    <li>PyPI package: {frontMatter.meta.pypi_package}</li>
+    <li>Slack channel: {frontMatter.meta.slack_channel_name}</li>
+    <li>Supported dbt Core version: {frontMatter.meta.min_core_version} and newer</li>
+    <li>dbt Cloud support: {frontMatter.meta.cloud_support}</li>
+    <li>Minimum data platform version: {frontMatter.meta.min_supported_version}</li>
+</ul>
+
+<h2> Installing {frontMatter.meta.pypi_package} </h2>
+
+pip is the easiest way to install the adapter:
+
+<code>pip install {frontMatter.meta.pypi_package}</code>
+
+Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.
+
+<h2> Configuring {frontMatter.meta.pypi_package} </h2>
+
+For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.
+
+For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
+:::info Dedicated SQL only
+
+Azure Synapse offers both Dedicated SQL Pools and Serverless SQL Pools.
+**Only Dedicated SQL Pools are supported by this adapter. If you really insist on using serverless pools, check out the neglected, experimental project: [dbt-synapse-serverless](https://github.com/dbt-msft/dbt-synapse-serverless)**
+
+:::
+
+### Prerequisites
+
+On Debian/Ubuntu, make sure you have the ODBC header files before installing:
+
+```bash
+sudo apt install unixodbc-dev
+```
+
+Download and install the [Microsoft ODBC Driver 18 for SQL Server](https://docs.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server?view=sql-server-ver15).
+If you already have ODBC Driver 17 installed, then that one will work as well.
+
+:::tip Default settings change in dbt-synapse v1.2 / ODBC Driver 18
+Microsoft made several changes related to connection encryption. Read more about the changes [here](/reference/warehouse-setups/mssql-setup).
+:::
+
+### Authentication methods
+
+This adapter is based on the adapter for Microsoft SQL Server.
+Therefore, the same authentication methods are supported.
+
+The configuration is the same except for one major difference:
+instead of specifying `type: sqlserver`, you specify `type: synapse`.
+
+Example:
+
+```yaml
+your_profile_name:
+  target: dev
+  outputs:
+    dev:
+      type: synapse
+      driver: 'ODBC Driver 17 for SQL Server' # (The ODBC Driver installed on your system)
+      server: workspacename.sql.azuresynapse.net # (Dedicated SQL endpoint of your workspace here)
+      port: 1433
+      database: exampledb
+      schema: schema_name
+      user: username
+      password: password
+```
+
+You can find all the available options, and how to configure them, on [the documentation page for the dbt-sqlserver adapter](/reference/warehouse-setups/mssql-setup).
diff --git a/website/docs/reference/warehouse-profiles/bigquery-profile.md b/website/docs/reference/warehouse-setups/bigquery-setup.md
similarity index 82%
rename from website/docs/reference/warehouse-profiles/bigquery-profile.md
rename to website/docs/reference/warehouse-setups/bigquery-setup.md
index 7f946e261c3..82c92872758 100644
--- a/website/docs/reference/warehouse-profiles/bigquery-profile.md
+++ b/website/docs/reference/warehouse-setups/bigquery-setup.md
@@ -1,13 +1,45 @@
 ---
-title: "BigQuery Profile"
+title: "BigQuery setup"
+meta:
+  maintained_by: dbt Labs
+  authors: 'core dbt maintainers'
+  github_repo: 'dbt-labs/dbt-bigquery'
+  pypi_package: 'dbt-bigquery'
+  min_core_version: 'v0.10.0'
+  cloud_support: Supported
+  min_supported_version: 'n/a'
+  slack_channel_name: '#db-bigquery'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C99SNSRTK'
+  platform_name: 'Big Query'
+  config_page: 'bigquery-configs'
 ---
-## Overview of dbt-bigquery
-**Maintained by:** core dbt maintainers
-**Author:** dbt Labs
-**Source:** [Github](https://github.com/dbt-labs/dbt-bigquery)
-**dbt Cloud:** Supported
-**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/C99SNSRTK)
+

+<h2> Overview of {frontMatter.meta.pypi_package} </h2>
+
+<ul>
+    <li>Maintained by: {frontMatter.meta.maintained_by}</li>
+    <li>Authors: {frontMatter.meta.authors}</li>
+    <li>GitHub repo: {frontMatter.meta.github_repo}</li>
+    <li>PyPI package: {frontMatter.meta.pypi_package}</li>
+    <li>Slack channel: {frontMatter.meta.slack_channel_name}</li>
+    <li>Supported dbt Core version: {frontMatter.meta.min_core_version} and newer</li>
+    <li>dbt Cloud support: {frontMatter.meta.cloud_support}</li>
+    <li>Minimum data platform version: {frontMatter.meta.min_supported_version}</li>
+</ul>
+
+<h2> Installing {frontMatter.meta.pypi_package} </h2>
+
+pip is the easiest way to install the adapter:
+
+<code>pip install {frontMatter.meta.pypi_package}</code>
+
+Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.
+
+<h2> Configuring {frontMatter.meta.pypi_package} </h2>
+
+For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.
+
+For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
    ## Authentication Methods @@ -421,8 +453,8 @@ my-profile: For a general overview of this process, see the official docs for [Creating Short-lived Service Account Credentials](https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials). - - + + ### Execution project New in v0.21.0 @@ -445,6 +477,34 @@ my-profile: execution_project: buck-stops-here-456 ``` + + +### Running Python models on Dataproc + +To run dbt Python models on GCP, dbt uses companion services, Dataproc and Cloud Storage, that offer tight integrations with BigQuery. You may use an existing Dataproc cluster and Cloud Storage bucket, or create new ones: +- https://cloud.google.com/dataproc/docs/guides/create-cluster +- https://cloud.google.com/storage/docs/creating-buckets + +Then, add the bucket name, cluster name, and cluster region to your connection profile: + +```yaml +my-profile: + target: dev + outputs: + dev: + type: bigquery + method: oauth + project: abc-123 + dataset: my_dataset + + # for dbt Python models + gcs_bucket: dbt-python + dataproc_cluster_name: dbt-python + dataproc_region: us-central1 +``` + + + ## Required permissions BigQuery's permission model is dissimilar from more conventional databases like Snowflake and Redshift. The following permissions are required for dbt user accounts: diff --git a/website/docs/reference/warehouse-setups/clickhouse-setup.md b/website/docs/reference/warehouse-setups/clickhouse-setup.md new file mode 100644 index 00000000000..5c66eab6df3 --- /dev/null +++ b/website/docs/reference/warehouse-setups/clickhouse-setup.md @@ -0,0 +1,113 @@ +--- +title: "ClickHouse setup" +meta: + maintained_by: Community + authors: 'Geoff Genz' + github_repo: 'ClickHouse/dbt-clickhouse' + pypi_package: 'dbt-clickhouse' + min_core_version: 'v0.19.0' + cloud_support: Not Supported + min_supported_version: '?' + slack_channel_name: '#db-clickhouse' + slack_channel_link: 'https://getdbt.slack.com/archives/C01DRQ178LQ' + platform_name: 'Clickhouse' + config_page: 'clickhouse-configs' +--- + +Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below. + + +

+<h2> Overview of {frontMatter.meta.pypi_package} </h2>
+
+<ul>
+    <li>Maintained by: {frontMatter.meta.maintained_by}</li>
+    <li>Authors: {frontMatter.meta.authors}</li>
+    <li>GitHub repo: {frontMatter.meta.github_repo}</li>
+    <li>PyPI package: {frontMatter.meta.pypi_package}</li>
+    <li>Slack channel: {frontMatter.meta.slack_channel_name}</li>
+    <li>Supported dbt Core version: {frontMatter.meta.min_core_version} and newer</li>
+    <li>dbt Cloud support: {frontMatter.meta.cloud_support}</li>
+    <li>Minimum data platform version: {frontMatter.meta.min_supported_version}</li>
+</ul>
+
+<h2> Installing {frontMatter.meta.pypi_package} </h2>
+
+pip is the easiest way to install the adapter:
+
+<code>pip install {frontMatter.meta.pypi_package}</code>
+
+Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.
+
+<h2> Configuring {frontMatter.meta.pypi_package} </h2>
+
+For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.
+
+For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
    + +## Connecting to ClickHouse with **dbt-clickhouse** + +To connect to ClickHouse from dbt, you'll need to add a [profile](https://docs.getdbt.com/dbt-cli/configure-your-profile) to your `profiles.yml` file. A ClickHouse profile conforms to the following syntax: + + + +```yaml +: + target: + outputs: + : + type: clickhouse + schema: + user: + password: + #optional fields + driver: http|native + port: + host: + retries: 1 + verify: False + secure: True + connect_timeout: 10 + send_receive_timeout: 300 + sync_request_timeout: 5 + compression: False + compress_block_size: 1048576 + database_engine: + check_exchange: True + custom_settings: + +``` + + + +#### Description of ClickHouse Profile Fields + + +| Field | Description | +|------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `type` | This must be included either in `profiles.yml` or in the `dbt_project.yml` file. Must be set to `clickhouse`. | +| `schema` | Required. A ClickHouse's database name. The dbt model database.schema.table is not compatible with ClickHouse because ClickHouse does not support a schema. So we use a simple model schema.table, where schema is the ClickHouse's database. We don't recommend using the `default` database. | +| `user` | Required. A ClickHouse username with adequate permissions to access the specified `schema`. | +| `password` | Required. The password associated with the specified `user`. | +| `driver` | Optional. The ClickHouse client interface, `http` or `native`. Defaults to `http` unless the `port` is set to 9440 or 9400, in which case the `native` driver is assumed. | +| `port` | Optional. ClickHouse server port number. Defaults to 8123/8443 (secure) if the driver is `http`, and to 9000/9440(secure) if the driver is `native`. | +| `host` | Optional. The host name of the connection. Default is `localhost`. | +| `retries` | Optional. Number of times to retry the initial connection attempt if the error appears to be recoverable. | +| `verify` | Optional. For (`secure=True`) connections, validate the ClickHouse server TLS certificate, including matching hostname, expiration, and signed by a trusted Certificate Authority. Defaults to True. | +| `secure` | Optional. Whether the connection (either http or native) is secured by TLS. This converts an http driver connection to https, and a native driver connection to the native ClickHouse protocol over TLS. the Defaults to False. | +| `cluster_mode` | Optional. Add connection settings to improve compatibility with clusters using the Replicated Database Engine. Default False. | +| `connect_timeout` | Optional. Connection timeout in seconds. Defaults is 10 seconds. | +| `send_receive_timeout` | Optional. Timeout for receiving data from or sending data to ClickHouse. Defaults to 5 minutes (300 seconds) | +| `sync_request_timeout` | Optional. Timeout for connection ping request (native connection only). Defaults to 5 seconds. | +| `compression` | Optional. Use compression in the connection. Defaults to `False`. If set to `True` for HTTP, this enables gzip compression. If set to `True` for the native protocol, this enabled lz4 compression. 
+
+#### Troubleshooting Connections
+
+If you encounter issues connecting to ClickHouse from dbt, make sure the following criteria are met:
+- The engine must be one of the [supported engines](clickhouse-configs#supported-table-engines).
+- You must have adequate permissions to access the database.
+- If you're not using the default table engine for the database, you must specify a table engine in your model configuration.
\ No newline at end of file
diff --git a/website/docs/reference/warehouse-setups/databricks-setup.md b/website/docs/reference/warehouse-setups/databricks-setup.md
new file mode 100644
index 00000000000..86439aa2484
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/databricks-setup.md
@@ -0,0 +1,98 @@
+---
+title: "Databricks setup"
+id: "databricks-setup"
+meta:
+  maintained_by: Databricks
+  authors: 'some dbt loving Bricksters'
+  github_repo: 'databricks/dbt-databricks'
+  pypi_package: 'dbt-databricks'
+  min_core_version: 'v0.18.0'
+  cloud_support: Coming Soon
+  min_supported_version: 'n/a'
+  slack_channel_name: '#db-databricks-and-spark'
+  slack_channel_link: 'https://getdbt.slack.com/archives/CNGCW8HKL'
+  platform_name: 'Databricks'
+  config_page: 'spark-configs'
+---
+

    Overview of {frontMatter.meta.pypi_package}

    + +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    + +## Installation and Distribution + + +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

+
+
+### Set up a Databricks Target
+
+dbt-databricks can connect to Databricks all-purpose clusters as well as SQL endpoints.
+The latter provides an opinionated way of running SQL workloads with optimal performance and price; the former provides all the flexibility of Spark.
+
+
+
+```yaml
+your_profile_name:
+  target: dev
+  outputs:
+    dev:
+      type: databricks
+      catalog: [optional catalog name if you are using Unity Catalog; only available in dbt-databricks>=1.1.1]
+      schema: [schema name]
+      host: [yourorg.databrickshost.com]
+      http_path: [/sql/your/http/path]
+      token: [dapiXXXXXXXXXXXXXXXXXXXXXXX] # Personal Access Token (PAT)
+      threads: [1 or more]  # optional, default 1
+```
+
+
+
+See the [Databricks documentation](https://docs.databricks.com/dev-tools/dbt.html#) on how
+to obtain the credentials for configuring your profile.
+
+## Caveats
+
+### Supported Functionality
+
+Most dbt Core functionality is supported, but some features are only available
+on Delta Lake.
+
+Delta-only features:
+1. Incremental model updates by `unique_key` instead of `partition_by` (see [`merge` strategy](spark-configs#the-merge-strategy))
+2. [Snapshots](https://docs.getdbt.com/docs/building-a-dbt-project/snapshots)
+
+### Choosing between dbt-databricks and dbt-spark
+
+While `dbt-spark` can be used to connect to Databricks, `dbt-databricks` was created to make it
+even easier to use dbt with the Databricks Lakehouse.
+
+`dbt-databricks` includes:
+- No need to install additional drivers or dependencies for use on the CLI
+- Use of Delta Lake for all models out of the box
+- SQL macros that are optimized to run with [Photon](https://docs.databricks.com/runtime/photon.html)
+
+### Support for Unity Catalog
+
+The adapter `dbt-databricks>=1.1.1` supports the 3-level namespace of Unity Catalog (catalog / schema / relations) so you can organize and secure your data the way you like.
diff --git a/website/docs/reference/warehouse-setups/dremio-setup.md b/website/docs/reference/warehouse-setups/dremio-setup.md
new file mode 100644
index 00000000000..66da2980831
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/dremio-setup.md
@@ -0,0 +1,194 @@
+---
+title: "Dremio setup"
+meta:
+  maintained_by: Dremio
+  authors: 'Dremio (formerly Fabrice Etanchaud)'
+  github_repo: 'dremio/dbt-dremio'
+  pypi_package: 'dbt-dremio'
+  min_core_version: 'v1.1.0'
+  cloud_support: Not Supported
+  min_supported_version: 'Dremio 22.0'
+  slack_channel_name: 'n/a'
+  slack_channel_link: 'https://www.getdbt.com/community'
+  platform_name: 'Dremio'
+  config_page: 'no-configs'
+---
+
+:::info Vendor plugin
+
+Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below.
+
+:::
+

    Overview of {frontMatter.meta.pypi_package}

    + +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    + + +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

+
+Follow the repository's link for OS dependencies.
+
+## Prerequisites for Dremio Cloud
+Before connecting from your project to Dremio Cloud, follow these prerequisite steps:
+* Ensure that you have the ID of the Sonar project that you want to use. See [Obtaining the ID of a Project](https://docs.dremio.com/cloud/cloud-entities/projects/#obtaining-the-id-of-a-project).
+* Ensure that you have a personal access token (PAT) for authenticating to Dremio Cloud. See [Creating a Token](https://docs.dremio.com/cloud/security/authentication/personal-access-token/#creating-a-token).
+
+
+## Prerequisites for Dremio Software
+
+* Ensure that you are using version 22.0 or later.
+* Enable these support keys in your Dremio cluster:
+    * `dremio.iceberg.enabled`
+    * `dremio.iceberg.ctas.enabled`
+    * `dremio.execution.support_unlimited_splits`
+
+  See Support Keys in the Dremio documentation for the steps.
+* If you want to use TLS to secure the connection between dbt and Dremio Software, configure full wire encryption in your Dremio cluster. For instructions, see Configuring Wire Encryption.
+
+
+## Initializing a Project
+
+1. Run the command `dbt init [project name]`.
+2. Select `dremio` as the database to use.
+3. Select one of these options to generate a profile for your project:
+    * `dremio_cloud` for working with Dremio Cloud
+    * `software_with_username_password` for working with a Dremio Software cluster and authenticating to the cluster with a username and a password
+    * `software_with_pat` for working with a Dremio Software cluster and authenticating to the cluster with a personal access token
+4. Append these lines to the end of the content of the `dbt_project.yml` file at the root of your project directory:
+```
+vars:
+  dremio:reflections_enabled: false
+```
+
+Next, configure the profile for your project.
+
+## Profiles
+
+When you initialize a project, you create one of these three profiles. You must configure it before trying to connect to Dremio Cloud or Dremio Software.
+
+* Profile for Dremio Cloud
+* Profile for Dremio Software with Username/Password Authentication
+* Profile for Dremio Software with Authentication Through a Personal Access Token
+
+For descriptions of the configurations in these profiles, see [Configurations](#configurations).
+
+
+
+
+
+```yaml
+[project name]:
+  outputs:
+    dev:
+      cloud_host: https://api.dremio.cloud
+      cloud_project_id: [project ID]
+      pat: [personal access token]
+      threads: [integer >= 1]
+      type: dremio
+      use_ssl: true
+      user: [email address]
+  target: dev
+```
+
+
+
+
+
+```yaml
+[project name]:
+  outputs:
+    dev:
+      password: [password]
+      port: [port]
+      software_host: [hostname or IP address]
+      threads: [integer >= 1]
+      type: dremio
+      use_ssl: [true|false]
+      user: [username]
+  target: dev
+```
+
+
+
+
+
+```yaml
+[project name]:
+  outputs:
+    dev:
+      pat: [personal access token]
+      port: [port]
+      software_host: [hostname or IP address]
+      threads: [integer >= 1]
+      type: dremio
+      use_ssl: [true|false]
+      user: [username]
+  target: dev
+```
+
+
+
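+The top-level key in each of these blocks (`[project name]`) must match the `profile` name that your `dbt_project.yml` points at; a minimal sketch, with the project name as a placeholder:
+
+```yaml
+# dbt_project.yml (sketch)
+name: my_dremio_project
+profile: my_dremio_project   # must match the top-level key in profiles.yml
+```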
+## Configurations
+
+### Configurations Common to Profiles for Dremio Cloud and Dremio Software
+
+| Configuration | Required? | Default Value | Description |
+| --- | --- | --- | --- |
+| `type` | Yes | dremio | Auto-populated when creating a Dremio project. Do not change this value. |
+| `threads` | Yes | 1 | The number of threads the dbt project runs on. |
+
+### Configurations in Profiles for Dremio Cloud
+| Configuration | Required? | Default Value | Description |
+| --- | --- | --- | --- |
+| `cloud_host` | Yes | `https://api.dremio.cloud` | US Control Plane: `https://api.dremio.cloud`; EU Control Plane: `https://api.eu.dremio.cloud` |
+| `user` | Yes | None | Email address used as a username in Dremio Cloud |
+| `pat` | Yes | None | Personal access token. See Personal Access Tokens for instructions about obtaining a token. |
+| `cloud_project_id` | Yes | None | The ID of the Sonar project in which to run transformations. |
+| `use_ssl` | Yes | `true` | The value must be `true`. |
+
+### Configurations in Profiles for Dremio Software
+| Configuration | Required? | Default Value | Description |
+| --- | --- | --- | --- |
+| `software_host` | Yes | None | The hostname or IP address of the coordinator node of the Dremio cluster. |
+| `port` | Yes | `9047` | Port for Dremio Software cluster API endpoints. |
+| `user` | Yes | None | The username of the account to use when logging into the Dremio cluster. |
+| `password` | Yes, if you are not using the pat configuration. | None | The password of the account to use when logging into the Dremio cluster. |
+| `pat` | Yes, if you are not using the user and password configurations. | None | The personal access token to use for authenticating to Dremio. See Personal Access Tokens for instructions about obtaining a token. The personal access token takes precedence if values for all three configurations (`user`, `password`, and `pat`) are specified. |
+| `use_ssl` | Yes | `true` | Acceptable values are `true` and `false`. If the value is set to `true`, ensure that full wire encryption is configured in your Dremio cluster. See [Prerequisites for Dremio Software](#prerequisites-for-dremio-software). |
\ No newline at end of file
diff --git a/website/docs/reference/warehouse-setups/duckdb-setup.md b/website/docs/reference/warehouse-setups/duckdb-setup.md
new file mode 100644
index 00000000000..d4488a2ead6
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/duckdb-setup.md
@@ -0,0 +1,75 @@
+---
+title: "DuckDB setup"
+meta:
+  maintained_by: Community
+  authors: 'Josh Wills (https://github.com/jwills)'
+  github_repo: 'jwills/dbt-duckdb'
+  pypi_package: 'dbt-duckdb'
+  min_core_version: 'v1.0.1'
+  cloud_support: Not Supported
+  min_supported_version: 'DuckDB 0.3.2'
+  slack_channel_name: '#db-duckdb'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C039D1J1LA2'
+  platform_name: 'Duck DB'
+  config_page: 'no-configs'
+---
+
+:::info Community plugin
+
+Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below.
+
+:::
+

    Overview of {frontMatter.meta.pypi_package}

    + +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    + + +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

+
+
+## Connecting to DuckDB with dbt-duckdb
+
+[DuckDB](http://duckdb.org) is an embedded database, similar to SQLite, but designed for OLAP-style analytics instead of OLTP. The only configuration parameter that is required in your profile (in addition to `type: duckdb`) is the `path` field, which should refer to a path on your local filesystem where you would like the DuckDB database file (and its associated write-ahead log) to be written. You can also specify the `schema` parameter if you would like to use a schema besides the default (which is called `main`).
+
+There is also a `database` field defined in the `DuckDBCredentials` class for consistency with the parent `Credentials` class, but it defaults to `main` and setting it to be something else will likely cause strange things to happen that cannot be fully predicted, so please avoid changing it.
+
+Example:
+
+
+
+```yaml
+your_profile_name:
+  target: dev
+  outputs:
+    dev:
+      type: duckdb
+      path: 'file_path/database_name.duckdb'
+      #optional fields
+      schema: schema_name
+```
+
+
+
+
diff --git a/website/docs/reference/warehouse-setups/exasol-setup.md b/website/docs/reference/warehouse-setups/exasol-setup.md
new file mode 100644
index 00000000000..1e29db02513
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/exasol-setup.md
@@ -0,0 +1,76 @@
+---
+title: "Exasol setup"
+meta:
+  maintained_by: Community
+  authors: 'Torsten Glunde, Ilija Kutle'
+  github_repo: 'tglunde/dbt-exasol'
+  pypi_package: 'dbt-exasol'
+  min_core_version: 'v0.14.0'
+  cloud_support: Not Supported
+  min_supported_version: 'Exasol 6.x'
+  slack_channel_name: 'n/a'
+  slack_channel_link: 'https://www.getdbt.com/community'
+  platform_name: 'Exasol'
+  config_page: 'no-configs'
+---
+
+:::info Community plugin
+
+Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below.
+
+:::
+

    Overview of {frontMatter.meta.pypi_package}

    + +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    + + +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

+
+### Connecting to Exasol with **dbt-exasol**
+
+#### User / password authentication
+
+Configure your dbt profile for using Exasol:
+
+##### Exasol connection information
+
+
+
+```yaml
+dbt-exasol:
+  target: dev
+  outputs:
+    dev:
+      type: exasol
+      threads: 1
+      dsn: HOST:PORT
+      user: USERNAME
+      password: PASSWORD
+      dbname: db
+      schema: SCHEMA
+```
+
+
diff --git a/website/docs/reference/warehouse-profiles/firebolt-profile.md b/website/docs/reference/warehouse-setups/firebolt-setup.md
similarity index 68%
rename from website/docs/reference/warehouse-profiles/firebolt-profile.md
rename to website/docs/reference/warehouse-setups/firebolt-setup.md
index c3d0a5aced5..7c724307131 100644
--- a/website/docs/reference/warehouse-profiles/firebolt-profile.md
+++ b/website/docs/reference/warehouse-setups/firebolt-setup.md
@@ -1,26 +1,51 @@
 ---
-title: "Firebolt Profile"
+title: "Firebolt setup"
+meta:
+  maintained_by: Firebolt
+  authors: 'Firebolt'
+  github_repo: 'firebolt-db/dbt-firebolt'
+  pypi_package: 'dbt-firebolt'
+  min_core_version: 'v1.1.0'
+  cloud_support: Not Supported
+  min_supported_version: 'n/a'
+  slack_channel_name: '#db-firebolt'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C03K2PTHHTP'
+  platform_name: 'Firebolt'
+  config_page: 'firebolt-configs'
 ---
 
 Some core functionality may be limited. If you're interested in contributing, check out the source code for the repository listed below.
 
-## Overview of dbt-firebolt
+

    Overview of {frontMatter.meta.pypi_package}

    -**Maintained by:** Firebolt -**Author:** Anders Swanson and Eric Ford -**Source:** [GitHub](https://github.com/firebolt-db/dbt-firebolt) -**dbt Slack channel:** [#db-firebolt](https://getdbt.slack.com/archives/C02PYT5CXN0) -**dbt Cloud:** Not Supported +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    -![dbt-firebolt stars](https://img.shields.io/github/stars/firebolt-db/dbt-firebolt?style=for-the-badge) -The package can be installed from PyPI with: +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

    -``` -pip install dbt-firebolt -``` For other information including Firebolt feature support, see the [GitHub README](https://github.com/firebolt-db/dbt-firebolt/blob/main/README.md) and the [changelog](https://github.com/firebolt-db/dbt-firebolt/blob/main/CHANGELOG.md). @@ -45,7 +70,6 @@ To connect to Firebolt from dbt, you'll need to add a [profile](https://docs.get threads: 1 #optional fields jar_path: - api_endpoint: "" host: "" account_name: "" ``` @@ -67,7 +91,6 @@ To specify values as environment variables, use the format `{{ env_var(' Overview of {frontMatter.meta.pypi_package} -**Maintained by:** Community -**Author:** Benjamin Menuet, Moshir Mikael, Armando Segnini and Amine El Mallem -**Source:** [Github](https://github.com/aws-samples/dbt-glue) -**Core version:** v0.24.0 and newer -**dbt Cloud:** Not Supported -**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/C02R4HSMBAT) +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    -![dbt-glue stars](https://img.shields.io/github/stars/aws-samples/dbt-glue?style=for-the-badg) +

    Installing {frontMatter.meta.pypi_package}

    -The package can be installed from PyPI with: +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

    -```bash -$ pip install dbt-glue -``` For further (and more likely up-to-date) info, see the [README](https://github.com/aws-samples/dbt-glue#readme) @@ -48,11 +71,11 @@ Please to update variables between **`<>`**, here are explanations of these argu |Args |Description | |---|---| -|region|The region where you're Glue database is stored | +|region|The region where your Glue database is stored | |AWS Account|The AWS account where you run your pipeline| |dbt output database|The database updated by dbt (this is the database configured in the profile.yml of your dbt environment)| |dbt source database|All databases used as source| -|dbt output bucket|The bucket name where the data will be generate dbt (the location configured in the profile.yml of your dbt environment)| +|dbt output bucket|The bucket name where the data will be generated by dbt (the location configured in the profile.yml of your dbt environment)| |dbt source bucket|The bucket name of source databases (if they are not managed by Lake Formation)| @@ -223,7 +246,7 @@ The table below describes all the options. |type |The driver to use. |yes| |query-comment |A string to inject as a comment in each query that dbt runs. |no| |role_arn |The ARN of the interactive session role created as part of the CloudFormation template. |yes| -|region |The AWS Region were you run the data pipeline. |yes| +|region |The AWS Region where you run the data pipeline. |yes| |workers |The number of workers of a defined workerType that are allocated when a job runs. |yes| |worker_type |The type of predefined worker that is allocated when a job runs. Accepts a value of Standard, G.1X, or G.2X. |yes| |schema |The schema used to organize data stored in Amazon S3. |yes| diff --git a/website/docs/reference/warehouse-setups/greenplum-setup.md b/website/docs/reference/warehouse-setups/greenplum-setup.md new file mode 100644 index 00000000000..2d708bb3d5f --- /dev/null +++ b/website/docs/reference/warehouse-setups/greenplum-setup.md @@ -0,0 +1,80 @@ +--- +title: "Greenplum setup" +id: "greenplum-setup" +meta: + maintained_by: Community + authors: 'Mark Poroshin, Dmitry Bevz' + github_repo: 'markporoshin/dbt-greenplum' + pypi_package: 'dbt-greenplum' + min_core_version: 'v1.0.0' + cloud_support: Not Supported + min_supported_version: 'Greenplum 6.0' + slack_channel_name: 'n/a' + slack_channel_link: 'https://www.getdbt.com/community' + platform_name: 'Greenplum' + config_page: 'greenplum-configs' +--- + +

    Overview of {frontMatter.meta.pypi_package}

    + +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    + + +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

+
+For further (and more likely up-to-date) info, see the [README](https://github.com/markporoshin/dbt-greenplum#README.md).
+
+
+## Profile Configuration
+
+Greenplum targets should be set up using the following configuration in your `profiles.yml` file.
+
+
+
+```yaml
+company-name:
+  target: dev
+  outputs:
+    dev:
+      type: greenplum
+      host: [hostname]
+      user: [username]
+      password: [password]
+      port: [port]
+      dbname: [database name]
+      schema: [dbt schema]
+      threads: [1 or more]
+      keepalives_idle: 0 # default 0, indicating the system default. See below
+      connect_timeout: 10 # default 10 seconds
+      search_path: [optional, override the default postgres search_path]
+      role: [optional, set the role dbt assumes when executing queries]
+      sslmode: [optional, set the sslmode used to connect to the database]
+
+```
+
+
+
+### Notes
+
+This adapter strongly depends on dbt-postgres, so you can read more about configuration in the dbt-postgres [Profile Setup](postgres-setup) guide.
diff --git a/website/docs/reference/warehouse-setups/hive-setup.md b/website/docs/reference/warehouse-setups/hive-setup.md
new file mode 100644
index 00000000000..e798bf37fe2
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/hive-setup.md
@@ -0,0 +1,174 @@
+---
+title: "Apache Hive setup"
+id: "hive-setup"
+meta:
+  maintained_by: Cloudera
+  authors: 'Cloudera'
+  github_repo: 'cloudera/dbt-hive'
+  pypi_package: 'dbt-hive'
+  min_core_version: 'v1.1.0'
+  cloud_support: Not Supported
+  min_supported_version: 'n/a'
+  slack_channel_name: '#db-hive'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C0401DTNSKW'
+  platform_name: 'Hive'
+  config_page: 'hive-configs'
+---
+

    Overview of {frontMatter.meta.pypi_package}

    + +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    + + +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

+
+
+## Connection Methods
+
+dbt-hive can connect to Apache Hive and Cloudera Data Platform clusters. The [Impyla](https://github.com/cloudera/impyla/) library is used to establish connections to Hive.
+
+dbt-hive supports two transport mechanisms:
+- binary
+- HTTP(S)
+
+The default mechanism is `binary`. To use HTTP transport, use the boolean option `use_http_transport: [true / false]`.
+
+## Authentication Methods
+
+dbt-hive supports two authentication mechanisms:
+- [`insecure`](#insecure) No authentication is used; only recommended for testing.
+- [`ldap`](#ldap) Authentication via LDAP
+
+### Insecure
+
+This method is only recommended if you have a local install of Hive and want to test out the dbt-hive adapter.
+
+
+
+```yaml
+your_profile_name:
+  target: dev
+  outputs:
+    dev:
+      type: hive
+      host: localhost
+      port: [port] # default value: 10000
+      schema: [schema name]
+
+```
+
+
+
+### LDAP
+
+LDAP allows you to authenticate with a username and password when Hive is [configured with LDAP Auth](https://cwiki.apache.org/confluence/display/Hive/Setting+Up+HiveServer2). LDAP is supported over Binary & HTTP connection mechanisms.
+
+This is the recommended authentication mechanism to use with Cloudera Data Platform (CDP).
+
+
+
+```yaml
+your_profile_name:
+  target: dev
+  outputs:
+    dev:
+      type: hive
+      host: [host name]
+      http_path: [optional, http path to Hive] # default value: None
+      port: [port] # default value: 10000
+      auth_type: ldap
+      use_http_transport: [true / false] # default value: true
+      use_ssl: [true / false] # TLS should always be used with LDAP to ensure secure transmission of credentials, default value: true
+      username: [username]
+      password: [password]
+      schema: [schema name]
+```
+
+
+
+Note: When creating a workload user in CDP, make sure the user has CREATE, SELECT, ALTER, INSERT, UPDATE, DROP, INDEX, READ and WRITE permissions. If you need the user to execute GRANT statements, you should also configure the appropriate GRANT permissions for them. When using Apache Ranger, permissions for allowing GRANT are typically set using the "Delegate Admin" option. For more information, see [`grants`](/reference/resource-configs/grants) and [on-run-start & on-run-end](/reference/project-configs/on-run-start-on-run-end).
+
+### Kerberos
+
+The Kerberos authentication mechanism uses GSSAPI to share Kerberos credentials when Hive is [configured with Kerberos Auth](https://ambari.apache.org/1.2.5/installing-hadoop-using-ambari/content/ambari-kerb-2-3-3.html).
+
+
+
+```yaml
+your_profile_name:
+  target: dev
+  outputs:
+    dev:
+      type: hive
+      host: [hostname]
+      port: [port] # default value: 10000
+      auth_type: [GSSAPI]
+      kerberos_service_name: [kerberos service name] # default value: None
+      use_http_transport: true # default value: true
+      use_ssl: true # TLS should always be used to ensure secure transmission of credentials, default value: true
+      schema: [schema name]
+
+```
+
+
+
+Note: A typical setup of Cloudera Private Cloud will involve the following steps to set up Kerberos before one can execute dbt commands (a shell sketch follows this list):
+- Get the correct realm config file for your installation (krb5.conf)
+- Set environment variable to point to the config file (export KRB5_CONFIG=/path/to/krb5.conf)
+- Set correct permissions for config file (sudo chmod 644 /path/to/krb5.conf)
+- Obtain keytab using kinit (kinit username@YOUR_REALM.YOUR_DOMAIN)
+- The keytab is valid for a certain period, after which you will need to run kinit again to renew its validity.
+- The user will need CREATE, DROP, and INSERT permissions on the schema provided in profiles.yml
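+As a rough sketch, those preparation steps might look like the following in a shell; the config path and principal are placeholders:
+
+```bash
+# point Kerberos tooling at the realm configuration (placeholder path)
+export KRB5_CONFIG=/path/to/krb5.conf
+sudo chmod 644 /path/to/krb5.conf
+
+# obtain a ticket; re-run kinit when it expires (placeholder principal)
+kinit username@YOUR_REALM.YOUR_DOMAIN
+```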
+
+### Instrumentation
+By default, the adapter will collect instrumentation events to help improve functionality and understand bugs. If you want to specifically switch this off, for instance, in a production environment, you can explicitly set the flag `usage_tracking: false` in your `profiles.yml` file.
+
+## Installation and Distribution
+
+dbt's adapter for Apache Hive is managed in its own repository, [dbt-hive](https://github.com/cloudera/dbt-hive). To use it,
+you must install the `dbt-hive` plugin.
+
+### Using pip
+The following commands will install the latest version of `dbt-hive` as well as the requisite version of `dbt-core` and the `impyla` driver used for connections.
+
+```
+pip install dbt-hive
+```
+
+### Supported Functionality
+
+| Name | Supported |
+|------|-----------|
+|Materialization: Table|Yes|
+|Materialization: View|Yes|
+|Materialization: Incremental - Append|Yes|
+|Materialization: Incremental - Insert+Overwrite|Yes|
+|Materialization: Incremental - Merge|No|
+|Materialization: Ephemeral|No|
+|Seeds|Yes|
+|Tests|Yes|
+|Snapshots|No|
+|Documentation|Yes|
+|Authentication: LDAP|Yes|
+|Authentication: Kerberos|Yes|
diff --git a/website/docs/reference/warehouse-setups/ibmdb2-setup.md b/website/docs/reference/warehouse-setups/ibmdb2-setup.md
new file mode 100644
index 00000000000..c601b1cb763
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/ibmdb2-setup.md
@@ -0,0 +1,112 @@
+---
+title: "IBM DB2 setup"
+id: "ibmdb2-setup"
+meta:
+  maintained_by: Community
+  authors: 'Rasmus Nyberg (https://github.com/aurany)'
+  github_repo: 'aurany/dbt-ibmdb2'
+  pypi_package: 'dbt-ibmdb2'
+  min_core_version: 'v1.0.4'
+  cloud_support: Not Supported
+  min_supported_version: 'IBM DB2 V9fp2'
+  slack_channel_name: 'n/a'
+  slack_channel_link: 'https://www.getdbt.com/community'
+  platform_name: 'IBM DB2'
+  config_page: 'no-configs'
+---
+
+:::info Community plugin
+
+Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below.
+
+:::
+
+## Overview of dbt-ibmdb2
+
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    + + +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

+
+
+This is an experimental plugin:
+- We have not tested it extensively
+- Tested with [dbt-adapter-tests](https://pypi.org/project/pytest-dbt-adapter/) and DB2 LUW on macOS and RHEL 8
+- Compatibility with other [dbt packages](https://hub.getdbt.com/) (like [dbt_utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/)) is only partially tested
+
+## Connecting to IBM DB2 with dbt-ibmdb2
+
+IBM DB2 targets should be set up using the following configuration in your `profiles.yml` file.
+
+Example:
+
+
+
+```yaml
+your_profile_name:
+  target: dev
+  outputs:
+    dev:
+      type: ibmdb2
+      schema: analytics
+      database: test
+      host: localhost
+      port: 50000
+      protocol: TCPIP
+      username: my_username
+      password: my_password
+```
+
+
+
+#### Description of IBM DB2 Profile Fields
+
+| Option          | Description | Required? | Example |
+| --------------- | ----------- | --------- | ------- |
+| type            | The specific adapter to use | Required | `ibmdb2` |
+| schema          | Specify the schema (database) to build models into | Required | `analytics` |
+| database        | Specify the database you want to connect to | Required | `testdb` |
+| host            | Hostname or IP address | Required | `localhost` |
+| port            | The port to use | Optional | `50000` |
+| protocol        | Protocol to use | Optional | `TCPIP` |
+| username        | The username to use to connect to the server | Required | `my-username` |
+| password        | The password to use for authenticating to the server | Required | `my-password` |
+
+
+## Supported features
+
+| DB2 LUW | DB2 z/OS | Feature |
+|:---------:|:---:|---------------------|
+| ✅ | 🤷 | Table materialization |
+| ✅ | 🤷 | View materialization |
+| ✅ | 🤷 | Incremental materialization |
+| ✅ | 🤷 | Ephemeral materialization |
+| ✅ | 🤷 | Seeds |
+| ✅ | 🤷 | Sources |
+| ✅ | 🤷 | Custom data tests |
+| ✅ | 🤷 | Docs generate |
+| ✅ | 🤷 | Snapshots |
+
+## Notes
+- dbt-ibmdb2 is built on the ibm_db Python package and there are some known encoding issues related to z/OS.
diff --git a/website/docs/reference/warehouse-setups/impala-setup.md b/website/docs/reference/warehouse-setups/impala-setup.md
new file mode 100644
index 00000000000..121eca94310
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/impala-setup.md
@@ -0,0 +1,173 @@
+---
+title: "Apache Impala setup"
+id: "impala-setup"
+meta:
+  maintained_by: Cloudera
+  authors: 'Cloudera'
+  github_repo: 'cloudera/dbt-impala'
+  pypi_package: 'dbt-impala'
+  min_core_version: 'v1.1.0'
+  cloud_support: Not Supported
+  min_supported_version: 'n/a'
+  slack_channel_name: '#db-impala'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C01PWAH41A5'
+  platform_name: 'Impala'
+  config_page: 'impala-configs'
+---
+

    Overview of {frontMatter.meta.pypi_package}

    + +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    + + +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

+
+
+## Connection Methods
+
+dbt-impala can connect to Apache Impala and Cloudera Data Platform clusters.
+
+The [Impyla](https://github.com/cloudera/impyla/) library is used to establish connections to Impala.
+
+Two transport mechanisms are supported:
+- binary
+- HTTP(S)
+
+The default mechanism is `binary`. To use HTTP transport, use the boolean option `use_http_transport: [true / false]`.
+
+## Authentication Methods
+
+dbt-impala supports three authentication mechanisms:
+- [`insecure`](#insecure) No authentication is used; only recommended for testing.
+- [`ldap`](#ldap) Authentication via LDAP
+- [`kerberos`](#kerberos) Authentication via Kerberos (GSSAPI)
+
+### Insecure
+
+This method is only recommended if you have a local install of Impala and want to test out the dbt-impala adapter.
+
+
+
+```yaml
+your_profile_name:
+  target: dev
+  outputs:
+    dev:
+      type: impala
+      host: [host] # default value: localhost
+      port: [port] # default value: 21050
+      dbname: [db name] # this should be same as schema name provided below, starting with 1.1.2 this parameter is optional
+      schema: [schema name]
+
+```
+
+
+
+### LDAP
+
+LDAP allows you to authenticate with a username & password when Impala is [configured with LDAP Auth](https://impala.apache.org/docs/build/html/topics/impala_ldap.html). LDAP is supported over Binary & HTTP connection mechanisms.
+
+This is the recommended authentication mechanism to use with Cloudera Data Platform (CDP).
+
+
+
+```yaml
+your_profile_name:
+  target: dev
+  outputs:
+    dev:
+      type: impala
+      host: [host name]
+      http_path: [optional, http path to Impala]
+      port: [port] # default value: 21050
+      auth_type: ldap
+      use_http_transport: [true / false] # default value: true
+      use_ssl: [true / false] # TLS should always be used with LDAP to ensure secure transmission of credentials, default value: true
+      username: [username]
+      password: [password]
+      dbname: [db name] # this should be same as schema name provided below, starting with 1.1.2 this parameter is optional
+      schema: [schema name]
+      retries: [retries] # number of times impyla attempts to retry connection to warehouse, default value: 3
+
+```
+
+
+
+Note: When creating a workload user in CDP, ensure that the user has CREATE, SELECT, ALTER, INSERT, UPDATE, DROP, INDEX, READ and WRITE permissions. If the user needs to execute GRANT statements (see, for instance, [grants](https://docs.getdbt.com/reference/resource-configs/grants) and [on-run-start & on-run-end](https://docs.getdbt.com/reference/project-configs/on-run-start-on-run-end)), the appropriate GRANT permissions should be configured. When using Apache Ranger, permissions for allowing GRANT are typically set using the "Delegate Admin" option.
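+If you'd rather not store the LDAP username and password in plain text, dbt's standard `env_var` function can be used inside `profiles.yml`; a minimal sketch, with hypothetical variable names:
+
+```yaml
+      # resolved from the environment at runtime
+      username: "{{ env_var('IMPALA_USER') }}"
+      password: "{{ env_var('IMPALA_PASSWORD') }}"
+```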
+### Kerberos
+
+The Kerberos authentication mechanism uses GSSAPI to share Kerberos credentials when Impala is [configured with Kerberos Auth](https://impala.apache.org/docs/build/html/topics/impala_kerberos.html).
+
+
+
+```yaml
+your_profile_name:
+  target: dev
+  outputs:
+    dev:
+      type: impala
+      host: [hostname]
+      port: [port] # default value: 21050
+      auth_type: [GSSAPI]
+      kerberos_service_name: [kerberos service name] # default value: None
+      use_http_transport: true # default value: true
+      use_ssl: true # TLS should always be used to ensure secure transmission of credentials, default value: true
+      dbname: [db name] # this should be same as schema name provided below, starting with 1.1.2 this parameter is optional
+      schema: [schema name]
+      retries: [retries] # number of times impyla attempts to retry connection to warehouse, default value: 3
+
+```
+
+
+
+Note: A typical setup of Cloudera EDH will involve the following steps to set up Kerberos before one can execute dbt commands:
+- Get the correct realm config file for your installation (krb5.conf)
+- Set environment variable to point to the config file (export KRB5_CONFIG=/path/to/krb5.conf)
+- Set correct permissions for config file (sudo chmod 644 /path/to/krb5.conf)
+- Obtain keytab using kinit (kinit username@YOUR_REALM.YOUR_DOMAIN)
+- The keytab is valid for a certain period, after which you will need to run kinit again to renew its validity.
+
+### Instrumentation
+
+By default, the adapter will send instrumentation events to Cloudera to help improve functionality and understand bugs. If you want to specifically switch this off, for instance, in a production environment, you can explicitly set the flag `usage_tracking: false` in your `profiles.yml` file.
+
+Relatedly, if you'd like to turn off dbt Labs' anonymous usage tracking, see [YAML Configurations: Send anonymous usage stats](https://docs.getdbt.com/reference/global-configs#send-anonymous-usage-stats) for more info.
+
+### Supported Functionality
+
+| Name | Supported |
+|------|-----------|
+|Materialization: Table|Yes|
+|Materialization: View|Yes|
+|Materialization: Incremental - Append|Yes|
+|Materialization: Incremental - Insert+Overwrite|Yes|
+|Materialization: Incremental - Merge|No|
+|Materialization: Ephemeral|No|
+|Seeds|Yes|
+|Tests|Yes|
+|Snapshots|Yes|
+|Documentation|Yes|
+|Authentication: LDAP|Yes|
+|Authentication: Kerberos|Yes|
diff --git a/website/docs/reference/warehouse-setups/iomete-setup.md b/website/docs/reference/warehouse-setups/iomete-setup.md
new file mode 100644
index 00000000000..bbc02a56683
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/iomete-setup.md
@@ -0,0 +1,90 @@
+---
+title: "iomete setup"
+id: "iomete-setup"
+meta:
+  maintained_by: iomete
+  authors: 'Namig Aliyev'
+  github_repo: 'iomete/dbt-iomete'
+  pypi_package: 'dbt-iomete'
+  min_core_version: 'v0.18.0'
+  cloud_support: Not Supported
+  min_supported_version: 'n/a'
+  slack_channel_name: '#db-iomete'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C03JFG22EP9'
+  platform_name: 'iomete'
+  config_page: 'no-configs'
+---
+

    Overview of {frontMatter.meta.pypi_package}

    + +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    + +## Installation and Distribution + + +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

+
+
+Set up an iomete Target
+
+iomete targets should be set up using the following configuration in your profiles.yml file.
+
+
+
+```yaml
+iomete:
+  target: dev
+  outputs:
+    dev:
+      type: iomete
+      cluster: cluster_name
+      host: dwh-<account_number>.iomete.com
+      port: 443
+      schema: database_name
+      account_number: iomete_account_number
+      user: iomete_user_name
+      password: iomete_user_password
+```
+
+
+
+##### Description of Profile Fields
+
+| Field    | Description | Required | Example |
+|----------|-------------|----------|------------------------|
+| type     | The specific adapter to use | Required | `iomete` |
+| cluster  | The cluster to connect to | Required | `reporting` |
+| host     | The host name of the connection. It is a combination of the `account_number` with the prefix `dwh-` and the suffix `.iomete.com`. | Required | `dwh-12345.iomete.com` |
+| port     | The port to use. | Required | `443` |
+| schema   | Specify the schema (database) to build models into. | Required | `dbt_finance` |
+| account_number | The iomete account number, in single quotes. | Required | `'1234566789123'` |
+| user     | The iomete username to use to connect to the server. | Required | `dbt_user` |
+| password | The iomete user password to use to connect to the server. | Required | `strong_password` |
+
+## Supported Functionality
+
+Most dbt Core functionality is supported.
+
+Iceberg-specific improvements:
+1. Joining the results of `show tables` and `show views`.
diff --git a/website/docs/reference/warehouse-setups/layer-setup.md b/website/docs/reference/warehouse-setups/layer-setup.md
new file mode 100644
index 00000000000..f1a9b8b8940
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/layer-setup.md
@@ -0,0 +1,165 @@
+---
+title: "Layer setup"
+id: "layer-setup"
+meta:
+  maintained_by: Layer
+  authors: 'Mehmet Ecevit'
+  github_repo: 'layerai/dbt-layer'
+  pypi_package: 'dbt-layer-bigquery'
+  min_core_version: 'v1.0.0'
+  cloud_support: Not Supported
+  min_supported_version: 'n/a'
+  slack_channel_name: '#tools-layer'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C03STA39TFE'
+  platform_name: 'Layer'
+  config_page: 'no-configs'
+---
+

    Overview of {frontMatter.meta.pypi_package}

    + +
      +
    • Maintained by: {frontMatter.meta.maintained_by}
    • +
    • Authors: {frontMatter.meta.authors}
    • +
    • GitHub repo: {frontMatter.meta.github_repo}
    • +
    • PyPI package: {frontMatter.meta.pypi_package}
    • +
    • Slack channel: {frontMatter.meta.slack_channel_name}
    • +
    • Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
    • +
    • dbt Cloud support: {frontMatter.meta.cloud_support}
    • +
    • Minimum data platform version: {frontMatter.meta.min_supported_version}
    • +
    + + + +

    Installing {frontMatter.meta.pypi_package}

    + +pip is the easiest way to install the adapter: + +pip install {frontMatter.meta.pypi_package} + +

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

    + +

    Configuring {frontMatter.meta.pypi_package}

    + +

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration

    + +

    For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

+
+### Profile Configuration
+
+Layer Bigquery targets should be set up using the following sections in your `profiles.yml` file.
+#### Layer Authentication
+Add your `layer_api_key` to your `profiles.yml` to authenticate with Layer. To get your Layer API Key:
+- First, [create your free Layer account](https://app.layer.ai/login?returnTo=%2Fgetting-started).
+- Go to [app.layer.ai](https://app.layer.ai) > **Settings** (Cog Icon by your profile photo) > **Developer** > **Create API key** to get your Layer API Key.
+
+#### Bigquery Authentication
+You can use any [authentication method](https://docs.getdbt.com/reference/warehouse-profiles/bigquery-setup) supported in the official dbt Bigquery adapter, since Layer uses the `dbt-bigquery` adapter to connect to your Bigquery instance.
+
+
+A sample profile:
+
+
+
+```yaml
+layer-profile:
+  target: dev
+  outputs:
+    dev:
+      # Layer authentication
+      type: layer_bigquery
+      layer_api_key: [the API Key to access your Layer account (opt)]
+      # Bigquery authentication
+      method: service-account
+      project: [GCP project id]
+      dataset: [the name of your dbt dataset]
+      threads: [1 or more]
+      keyfile: [/path/to/bigquery/keyfile.json]
+```
+
+
+
+#### Description of Layer Bigquery Profile Fields
+
+The following fields are required:
+
+Parameter               | Default     | Type         | Description
+----------------------- | ----------- |--------------| ---
+`type`                  |             | string       | Specifies the adapter you want to use. It should be `layer_bigquery`.
+`layer_api_key`         |             | string (opt) | Specifies your Layer API key. If you want to make predictions with public ML models from Layer, you don't need to have this key in your profile. It's required if you load ML models from your Layer account or train an AutoML model.
+`layer_project`         |             | string (opt) | Specifies your target Layer project. If you don't specify one, Layer will use the project with the same name as your dbt project.
+`method`                |             | string       | Specifies the authentication type to connect to your BigQuery.
+
+The rest of the parameters depend on the BigQuery authentication method you specified.
+
+## Usage
+
+### AutoML
+
+You can automatically build state-of-the-art ML models using your own dbt models with plain SQL. To train an AutoML model, all you have to do is pass your model type, input data (features), and the target column you want to predict to `layer.automl()` in your SQL. Layer AutoML will pick the best-performing model and enable you to call it by its dbt model name to make predictions, as shown in the Prediction section below.
+
+_Syntax:_
+```
+layer.automl("MODEL_TYPE", ARRAY[FEATURES], TARGET)
+```
+
+_Parameters:_
+
+| Syntax       | Description |
+| ------------ | ----------- |
+| `MODEL_TYPE` | Type of the model you want to train. There are two options: `classifier`, a model to predict classes/labels or categories such as spam detection; and `regressor`, a model to predict continuous outcomes such as CLV prediction. |
+| `FEATURES`   | Input column names as a list to train your AutoML model. |
+| `TARGET`     | Target column that you want to predict. |
+
+
+_Requirements:_
+- You need to put `layer_api_key` in your dbt profile to make AutoML work.
+
+_Example:_
+
+Check out [Order Review AutoML Project](https://github.com/layerai/dbt-layer/tree/mecevit/update-docs/examples/order_review_prediction):
+
+```sql
+SELECT order_id,
+       layer.automl(
+           -- This is a regression problem
+           'regressor',
+           -- Data (input features) to train our model
+           ARRAY[
+           days_between_purchase_and_delivery, order_approved_late,
+           actual_delivery_vs_expectation_bucket, total_order_price, total_order_freight, is_multiItems_order,seller_shipped_late],
+           -- Target column we want to predict
+           review_score
+       )
+FROM {{ ref('training_data') }}
+```
+
+### Prediction
+
+You can make predictions using any Layer ML model within your dbt models. The Layer dbt adapter helps you score data that resides in your warehouse within your dbt DAG with SQL.
+
+_Syntax:_
+```
+layer.predict("LAYER_MODEL_PATH", ARRAY[FEATURES])
+```
+
+_Parameters:_
+
+| Syntax             | Description |
+| ------------------ | ----------- |
+| `LAYER_MODEL_PATH` | This is the Layer model path in the form `/[organization_name]/[project_name]/models/[model_name]`. You can use just the model name if you want to use an AutoML model within the same dbt project. |
+| `FEATURES`         | These are the columns that this model requires to make a prediction. You should pass the columns as a list, like `ARRAY[column1, column2, column3]`. |
+
+_Example:_
+
+Check out [Cloth Detection Project](https://github.com/layerai/dbt-layer/tree/mecevit/update-docs/examples/cloth_detector):
+
+```sql
+SELECT
+    id,
+    layer.predict("layer/clothing/models/objectdetection", ARRAY[image])
+FROM
+    {{ ref("products") }}
+```
+
+
diff --git a/website/docs/reference/warehouse-setups/materialize-setup.md b/website/docs/reference/warehouse-setups/materialize-setup.md
new file mode 100644
index 00000000000..684f7174a9f
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/materialize-setup.md
@@ -0,0 +1,121 @@
+---
+title: "Materialize setup"
+id: "materialize-setup"
+meta:
+  maintained_by: Materialize Inc.
+  authors: 'Materialize team'
+  github_repo: 'MaterializeInc/materialize/blob/main/misc/dbt-materialize'
+  pypi_package: 'dbt-materialize'
+  min_core_version: 'v0.18.0'
+  cloud_support: Not Supported
+  min_supported_version: 'n/a'
+  slack_channel_name: '#db-materialize'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C01PWAH41A5'
+  platform_name: 'Materialize'
+  config_page: 'materialize-configs'
+---
+
+:::info Vendor-supported plugin
+
+Certain core functionality may vary. If you would like to report a bug, request a feature, or contribute, you can check out the linked repository and open an issue.
+
+:::
+

## Overview of {frontMatter.meta.pypi_package}

- Maintained by: {frontMatter.meta.maintained_by}
- Authors: {frontMatter.meta.authors}
- GitHub repo: {frontMatter.meta.github_repo}
- PyPI package: {frontMatter.meta.pypi_package}
- Slack channel: {frontMatter.meta.slack_channel_name}
- Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
- dbt Cloud support: {frontMatter.meta.cloud_support}
- Minimum data platform version: {frontMatter.meta.min_supported_version}

## Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

`pip install {frontMatter.meta.pypi_package}`

Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

## Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration please refer to {frontMatter.meta.platform_name} Configuration

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
## Connecting to Materialize

Once you have set up a [Materialize account](https://materialize.com/register/), adapt your `profiles.yml` to connect to your instance using the following reference profile configuration:

```yaml
dbt-materialize:
  target: dev
  outputs:
    dev:
      type: materialize
      host: [host]
      port: [port]
      user: [user@domain.com]
      pass: [password]
      dbname: [database]
      cluster: [cluster] # default 'default'
      schema: [dbt schema]
      sslmode: require
      keepalives_idle: 0 # default 0, indicating the system default
      connect_timeout: 10 # default 10 seconds
      retries: 1 # default 1 retry on error/timeout when opening connections
```

### Configurations

`cluster`: The default [cluster](https://materialize.com/docs/overview/key-concepts/#clusters) is used to maintain materialized views or indexes. A [`default` cluster](https://materialize.com/docs/sql/show-clusters/#default-cluster) is pre-installed in every environment, but we recommend creating dedicated clusters to isolate the workloads in your dbt project (for example, `staging` and `data_mart`).

`keepalives_idle`: The number of seconds before sending a ping to keep the Materialize connection active. If you are encountering `SSL SYSCALL error: EOF detected`, you may want to lower the [keepalives_idle](https://docs.getdbt.com/reference/warehouse-setups/postgres-setup#keepalives_idle) value to prevent the database from closing its connection.

To test the connection to Materialize, run:

```
dbt debug
```

If the output reads "All checks passed!", you're good to go! Check the [dbt and Materialize guide](https://materialize.com/docs/guides/dbt/) to learn more and get started.

## Supported Features

### Materializations

Because Materialize is optimized for transformations on streaming data, while the core of dbt is built around batch processing, the `dbt-materialize` adapter implements a few custom materialization types:

Type | Supported? | Details
-----|------------|----------------
`source` | YES | Creates a [source](https://materialize.com/docs/sql/create-source/).
`view` | YES | Creates a [view](https://materialize.com/docs/sql/create-view/#main).
`materializedview` | YES | Creates a [materialized view](https://materialize.com/docs/sql/create-materialized-view/#main).
`table` | YES | Creates a [materialized view](https://materialize.com/docs/sql/create-materialized-view/#main). (Actual table support pending [#5266](https://github.com/MaterializeInc/materialize/issues/5266))
`sink` | YES | Creates a [sink](https://materialize.com/docs/sql/create-sink/#main).
`ephemeral` | YES | Executes queries using CTEs.
`incremental` | NO | Use `materializedview` instead. Materialized views will always return up-to-date results without manual or configured refreshes. For more information, check out the [Materialize documentation](https://materialize.com/docs/).

### Indexes

Materialized views (`materializedview`), views (`view`) and sources (`source`) may have a list of [`indexes`](resource-configs/materialize-configs/indexes) defined.

### Seeds

Running [`dbt seed`](commands/seed) will create a static materialized view from a CSV file. You will not be able to add to or update this view after it has been created.

### Tests

Running [`dbt test`](commands/test) with the optional `--store-failures` flag or [`store_failures` config](resource-configs/store_failures) will create a materialized view for each test you've chosen to store.
This view is a continuously updating representation of failures.

## Resources

- [dbt and Materialize guide](https://materialize.com/docs/guides/dbt/)
\ No newline at end of file
diff --git a/website/docs/reference/warehouse-setups/mindsdb-setup.md b/website/docs/reference/warehouse-setups/mindsdb-setup.md
new file mode 100644
index 00000000000..9c5a691aa80
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/mindsdb-setup.md
@@ -0,0 +1,80 @@
---
title: "MindsDB setup"
id: "mindsdb-setup"
meta:
  maintained_by: MindsDB
  authors: 'MindsDB team'
  github_repo: 'mindsdb/dbt-mindsdb'
  pypi_package: 'dbt-mindsdb'
  min_core_version: 'v1.0.1'
  cloud_support: Not Supported
  min_supported_version: '?'
  slack_channel_name: 'n/a'
  slack_channel_link: 'https://www.getdbt.com/community'
  platform_name: 'MindsDB'
  config_page: 'mindsdb-configs'
---

:::info Vendor-supported plugin

The dbt-mindsdb package allows dbt to connect to [MindsDB](https://github.com/mindsdb/mindsdb).

:::

## Overview of {frontMatter.meta.pypi_package}

- Maintained by: {frontMatter.meta.maintained_by}
- Authors: {frontMatter.meta.authors}
- GitHub repo: {frontMatter.meta.github_repo}
- PyPI package: {frontMatter.meta.pypi_package}
- Slack channel: {frontMatter.meta.slack_channel_name}
- Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
- dbt Cloud support: {frontMatter.meta.cloud_support}
- Minimum data platform version: {frontMatter.meta.min_supported_version}

## Installation

### Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

`pip install {frontMatter.meta.pypi_package}`

Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

### Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration please refer to {frontMatter.meta.platform_name} Configuration

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
## Configurations

Basic `profiles.yml` for connecting to MindsDB:

```yml
mindsdb:
  outputs:
    dev:
      database: 'mindsdb'
      host: '127.0.0.1'
      password: ''
      port: 47335
      schema: 'mindsdb'
      type: mindsdb
      username: 'mindsdb'
  target: dev
```

| Key      | Required | Description                                           | Example                           |
| -------- | -------- | ----------------------------------------------------- | --------------------------------- |
| type     | ✔️       | The specific adapter to use                           | `mindsdb`                         |
| host     | ✔️       | The MindsDB server (hostname) to connect to           | `cloud.mindsdb.com`               |
| port     | ✔️       | The port to use                                       | `3306` or `47335`                 |
| schema   | ✔️       | Specify the schema (database) to build models into    | The MindsDB datasource            |
| username | ✔️       | The username to use to connect to the server          | `mindsdb` or a MindsDB cloud user |
| password | ✔️       | The password to use for authenticating to the server  | `pass`                            |

diff --git a/website/docs/reference/warehouse-setups/mssql-setup.md b/website/docs/reference/warehouse-setups/mssql-setup.md
new file mode 100644
index 00000000000..6d786d9349b
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/mssql-setup.md
@@ -0,0 +1,459 @@
---
title: "Microsoft SQL Server setup"
id: "mssql-setup"
meta:
  maintained_by: Community
  authors: 'dbt-msft community (https://github.com/dbt-msft)'
  github_repo: 'dbt-msft/dbt-sqlserver'
  pypi_package: 'dbt-sqlserver'
  min_core_version: 'v0.14.0'
  cloud_support: Not Supported
  min_supported_version: 'SQL Server 2016'
  slack_channel_name: '#db-sqlserver'
  slack_channel_link: 'https://getdbt.slack.com/archives/CMRMDDQ9W'
  platform_name: 'SQL Server'
  config_page: 'mssql-configs'
---

:::info Community plugin

Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below.

:::

## Overview of {frontMatter.meta.pypi_package}

- Maintained by: {frontMatter.meta.maintained_by}
- Authors: {frontMatter.meta.authors}
- GitHub repo: {frontMatter.meta.github_repo}
- PyPI package: {frontMatter.meta.pypi_package}
- Slack channel: {frontMatter.meta.slack_channel_name}
- Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
- dbt Cloud support: {frontMatter.meta.cloud_support}
- Minimum data platform version: {frontMatter.meta.min_supported_version}

## Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

`pip install {frontMatter.meta.pypi_package}`

Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

## Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration please refer to {frontMatter.meta.platform_name} Configuration

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
:::tip Default settings change in dbt-sqlserver v1.2 / ODBC Driver 18
Microsoft made several changes related to connection encryption. Read more about the changes [below](#connection-encryption).
:::

### Prerequisites

On Debian/Ubuntu, make sure you have the ODBC header files before installing:

```bash
sudo apt install unixodbc-dev
```

Download and install the [Microsoft ODBC Driver 18 for SQL Server](https://docs.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server?view=sql-server-ver15).
If you already have ODBC Driver 17 installed, that one will work as well.

The adapter is tested with SQL Server 2017, SQL Server 2019, SQL Server 2022, and Azure SQL Database. These versions are tested with Microsoft ODBC Driver 17 and Microsoft ODBC Driver 18.

## Authentication methods & profile configuration

### Common configuration

For all the authentication methods below, the following configuration options can be set in your `profiles.yml` file:

* `driver`: The ODBC driver to use. E.g. `ODBC Driver 18 for SQL Server`
* `server`: The server hostname. E.g. `localhost`
* `port`: The server port. E.g. `1433`
* `database`: The database name.
* `schema`: The schema name. E.g. `dbo`
* `retries`: The number of times to automatically retry a query before failing. Defaults to `1`. Note that queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
* `encrypt`: Whether to encrypt the connection to the server. Defaults to `true`. Read more about encryption [below](#connection-encryption).
* `trust_cert`: Whether to trust the server certificate. Defaults to `false`. Read more about encryption [below](#connection-encryption).

### Connection encryption

Microsoft made several changes in the release of ODBC Driver 18 that affect how connection encryption is configured.
To accommodate these changes, starting in dbt-sqlserver 1.2.0 or newer, the default values of `encrypt` and `trust_cert` have changed.
Both of these settings will now **always** be included in the connection string to the server, regardless of whether you've left them out of your profile configuration or not.

* The default value of `encrypt` is `true`, meaning that connections are encrypted by default.
* The default value of `trust_cert` is `false`, meaning that the server certificate will be validated. By setting this to `true`, a self-signed certificate will be accepted.

More details about how these values affect your connection, and how they are used differently across versions of the ODBC driver, can be found in the [Microsoft documentation](https://learn.microsoft.com/en-us/sql/connect/odbc/dsn-connection-string-attribute?view=sql-server-ver16#encrypt).

### Standard SQL Server authentication

SQL Server credentials are supported for on-premises servers as well as Azure,
and this is the default authentication method for `dbt-sqlserver`.

When running on Windows, you can also use your Windows credentials to authenticate.
+ + + + + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: sqlserver + driver: 'ODBC Driver 18 for SQL Server' # (The ODBC Driver installed on your system) + server: hostname or IP of your server + port: 1433 + database: database + schema: schema_name + user: username + password: password +``` + + + + + + + + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: sqlserver + driver: 'ODBC Driver 18 for SQL Server' # (The ODBC Driver installed on your system) + server: hostname or IP of your server + port: 1433 + database: exampledb + schema: schema_name + windows_login: True +``` + + + + + + + +### Azure Active Directory Authentication (AAD) + +While you can use the SQL username and password authentication as mentioned above, +you might opt to use one of the authentication methods below for Azure SQL. + +The following additional methods are available to authenticate to Azure SQL products: + +* AAD username and password +* Service principal (a.k.a. AAD Application) +* Managed Identity +* Environment-based authentication +* Azure CLI authentication +* VS Code authentication (available through the automatic option below) +* Azure PowerShell module authentication (available through the automatic option below) +* Automatic authentication + +The automatic authentication setting is in most cases the easiest choice and works for all of the above. + + + + + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: sqlserver + driver: 'ODBC Driver 18 for SQL Server' # (The ODBC Driver installed on your system) + server: hostname or IP of your server + port: 1433 + database: exampledb + schema: schema_name + authentication: ActiveDirectoryPassword + user: bill.gates@microsoft.com + password: iheartopensource +``` + + + + + + + +Client ID is often also referred to as Application ID. + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: sqlserver + driver: 'ODBC Driver 18 for SQL Server' # (The ODBC Driver installed on your system) + server: hostname or IP of your server + port: 1433 + database: exampledb + schema: schema_name + authentication: ServicePrincipal + tenant_id: 00000000-0000-0000-0000-000000001234 + client_id: 00000000-0000-0000-0000-000000001234 + client_secret: S3cret! +``` + + + + + + + +Both system-assigned and user-assigned managed identities will work. + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: sqlserver + driver: 'ODBC Driver 18 for SQL Server' # (The ODBC Driver installed on your system) + server: hostname or IP of your server + port: 1433 + database: exampledb + schema: schema_name + authentication: MSI +``` + + + + + + + +This authentication option allows you to dynamically select an authentication method depending on the available environment variables. + +[The Microsoft docs on EnvironmentCredential](https://docs.microsoft.com/en-us/python/api/azure-identity/azure.identity.environmentcredential?view=azure-python) +explain the available combinations of environment variables you can use. 
+ + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: sqlserver + driver: 'ODBC Driver 18 for SQL Server' # (The ODBC Driver installed on your system) + server: hostname or IP of your server + port: 1433 + database: exampledb + schema: schema_name + authentication: environment +``` + + + + + + + +First, install the [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli), then, log in: + +`az login` + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: sqlserver + driver: 'ODBC Driver 18 for SQL Server' # (The ODBC Driver installed on your system) + server: hostname or IP of your server + port: 1433 + database: exampledb + schema: schema_name + authentication: CLI +``` + + + + + + + +This authentication option will automatically try to use all available authentication methods. + +The following methods are tried in order: + +1. Environment-based authentication +2. Managed Identity authentication +3. Visual Studio authentication (*Windows only, ignored on other operating systems*) +4. Visual Studio Code authentication +5. Azure CLI authentication +6. Azure PowerShell module authentication + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: sqlserver + driver: 'ODBC Driver 18 for SQL Server' # (The ODBC Driver installed on your system) + server: hostname or IP of your server + port: 1433 + database: exampledb + schema: schema_name + authentication: auto +``` + + + + + + + +#### Additional options for AAD on Windows + +On Windows systems, the following additional authentication methods are also available for Azure SQL: + +* AAD interactive +* AAD integrated +* Visual Studio authentication (available through the automatic option above) + + + + + +This setting can optionally show Multi-Factor Authentication prompts. + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: sqlserver + driver: 'ODBC Driver 18 for SQL Server' # (The ODBC Driver installed on your system) + server: hostname or IP of your server + port: 1433 + database: exampledb + schema: schema_name + authentication: ActiveDirectoryInteractive + user: bill.gates@microsoft.com +``` + + + + + + + +This uses the credentials you're logged in with on the current machine. + + + +```yaml +your_profile_name: + target: dev + outputs: + dev: + type: sqlserver + driver: 'ODBC Driver 18 for SQL Server' # (The ODBC Driver installed on your system) + server: hostname or IP of your server + port: 1433 + database: exampledb + schema: schema_name + authentication: ActiveDirectoryIntegrated +``` + + + + + + + +### Automatic AAD principal provisioning for grants + +In dbt 1.2 or newer you can use the [grants](https://docs.getdbt.com/reference/resource-configs/grants) config block to automatically grant/revoke permissions on your models to users or groups. This is fully supported in this adapter and comes with an additional feature. + +By setting `auto_provision_aad_principals` to `true` in your model configuration, you can automatically provision Azure Active Directory (AAD) principals (users or groups) that don't exist yet. + +In Azure SQL, you can sign in using AAD authentication, but to be able to grant an AAD principal certain permissions, it needs to be linked in the database first. ([Microsoft documentation](https://learn.microsoft.com/en-us/azure/azure-sql/database/authentication-aad-configure?view=azuresql)) + +Note that principals will not be deleted automatically when they are removed from the `grants` block. 
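As a sketch, combining this option with a `grants` block in `dbt_project.yml` might look like the following (the project name, folder, and AAD group address are hypothetical placeholders):

```yaml
# dbt_project.yml (sketch): auto-provision any AAD principals named in grants
models:
  my_project:            # hypothetical project name
    marts:               # hypothetical model folder
      +auto_provision_aad_principals: true
      +grants:
        select: ["reporting-team@example.com"]   # AAD group to link and grant
```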
### Reference of all connection options

| configuration option | description | required | default value |
|----------------------|-------------|----------|---------------|
| `driver`             | The ODBC driver to use. | :white_check_mark: | |
| `host`               | The hostname of the database server. | :white_check_mark: | |
| `port`               | The port of the database server. | | `1433` |
| `database`           | The name of the database to connect to. | :white_check_mark: | |
| `schema`             | The schema to use. | :white_check_mark: | |
| `authentication`     | The authentication method to use. This is not required for Windows authentication. | | `'sql'` |
| `UID`                | Username used to authenticate. This can be left out depending on the authentication method. | | |
| `PWD`                | Password used to authenticate. This can be left out depending on the authentication method. | | |
| `windows_login`      | Set this to `true` to use Windows authentication. This is only available for SQL Server. | | |
| `tenant_id`          | The tenant ID of the Azure Active Directory instance. This is only used when connecting to Azure SQL with a service principal. | | |
| `client_id`          | The client ID of the Azure Active Directory service principal. This is only used when connecting to Azure SQL with an AAD service principal. | | |
| `client_secret`      | The client secret of the Azure Active Directory service principal. This is only used when connecting to Azure SQL with an AAD service principal. | | |
| `encrypt`            | Set this to `false` to disable the use of encryption. See [above](#connection-encryption). | | `true` |
| `trust_cert`         | Set this to `true` to trust the server certificate. See [above](#connection-encryption). | | `false` |
| `retries`            | The number of times to retry a failed connection. | | `1` |

Valid values for `authentication`:

* `sql`: SQL authentication using username and password
* `ActiveDirectoryPassword`: Active Directory authentication using username and password
* `ActiveDirectoryInteractive`: Active Directory authentication using a username and MFA prompts
* `ActiveDirectoryIntegrated`: Active Directory authentication using the current user's credentials
* `ServicePrincipal`: Azure Active Directory authentication using a service principal
* `CLI`: Azure Active Directory authentication using the account you're logged in with in the Azure CLI
* `MSI`: Azure Active Directory authentication using a managed identity available on the system
* `environment`: Azure Active Directory authentication using environment variables as documented [here](https://learn.microsoft.com/en-us/python/api/azure-identity/azure.identity.environmentcredential?view=azure-python)
* `auto`: Azure Active Directory authentication trying the previous authentication methods until it finds one that works

diff --git a/website/docs/reference/warehouse-setups/mysql-setup.md b/website/docs/reference/warehouse-setups/mysql-setup.md
new file mode 100644
index 00000000000..c91e48c30ec
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/mysql-setup.md
@@ -0,0 +1,122 @@
---
title: "MySQL setup"
id: "mysql-setup"
meta:
  maintained_by: Community
  authors: 'Doug Beatty (https://github.com/dbeatty10)'
  github_repo: 'dbeatty10/dbt-mysql'
  pypi_package: 'dbt-mysql'
  min_core_version: 'v0.18.0'
  cloud_support: Not Supported
  min_supported_version: 'MySQL 5.7 and 8.0'
  slack_channel_name: '#db-mysql-family'
  slack_channel_link: 'https://getdbt.slack.com/archives/C03BK0SHC64'
  platform_name: 'MySQL'
  config_page: 'no-configs'
---

:::info Community plugin

Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below.

:::

## Overview of {frontMatter.meta.pypi_package}

- Maintained by: {frontMatter.meta.maintained_by}
- Authors: {frontMatter.meta.authors}
- GitHub repo: {frontMatter.meta.github_repo}
- PyPI package: {frontMatter.meta.pypi_package}
- Slack channel: {frontMatter.meta.slack_channel_name}
- Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
- dbt Cloud support: {frontMatter.meta.cloud_support}
- Minimum data platform version: {frontMatter.meta.min_supported_version}

## Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

`pip install {frontMatter.meta.pypi_package}`

Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

## Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration please refer to {frontMatter.meta.platform_name} Configuration

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
This is an experimental plugin:
- It has not been tested extensively.
- Storage engines other than the default of InnoDB are untested.
- It has only been tested with [dbt-adapter-tests](https://github.com/dbt-labs/dbt-adapter-tests) with the following versions:
  - MySQL 5.7
  - MySQL 8.0
  - MariaDB 10.5
- Compatibility with other [dbt packages](https://hub.getdbt.com/) (like [dbt_utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/)) is also untested.

Please read these docs carefully and use at your own risk. [Issues](https://github.com/dbeatty10/dbt-mysql/issues/new) and [PRs](https://github.com/dbeatty10/dbt-mysql/blob/main/CONTRIBUTING.rst#contributing) welcome!

## Connecting to MySQL with dbt-mysql

MySQL targets should be set up using the following configuration in your `profiles.yml` file.

Example:

```yaml
your_profile_name:
  target: dev
  outputs:
    dev:
      type: mysql
      server: localhost
      port: 3306
      schema: analytics
      username: your_mysql_username
      password: your_mysql_password
      ssl_disabled: True
```

#### Description of MySQL Profile Fields

| Option       | Description                                           | Required? | Example                        |
| ------------ | ----------------------------------------------------- | --------- | ------------------------------ |
| type         | The specific adapter to use                           | Required  | `mysql`, `mysql5` or `mariadb` |
| server       | The server (hostname) to connect to                   | Required  | `yourorg.mysqlhost.com`        |
| port         | The port to use                                       | Optional  | `3306`                         |
| schema       | Specify the schema (database) to build models into    | Required  | `analytics`                    |
| username     | The username to use to connect to the server          | Required  | `dbt_admin`                    |
| password     | The password to use for authenticating to the server  | Required  | `correct-horse-battery-staple` |
| ssl_disabled | Set to enable or disable TLS connectivity to mysql5.x | Optional  | `True` or `False`              |

## Supported features

| MariaDB 10.5 | MySQL 5.7 | MySQL 8.0 | Feature                     |
|:------------:|:---------:|:---------:|-----------------------------|
| ✅           | ✅        | ✅        | Table materialization       |
| ✅           | ✅        | ✅        | View materialization        |
| ✅           | ✅        | ✅        | Incremental materialization |
| ✅           | ❌        | ✅        | Ephemeral materialization   |
| ✅           | ✅        | ✅        | Seeds                       |
| ✅           | ✅        | ✅        | Sources                     |
| ✅           | ✅        | ✅        | Custom data tests           |
| ✅           | ✅        | ✅        | Docs generate               |
| 🤷           | 🤷        | ✅        | Snapshots                   |

## Notes
- Ephemeral materializations rely upon [Common Table Expressions](https://en.wikipedia.org/wiki/Hierarchical_and_recursive_queries_in_SQL) (CTEs), which are not supported until MySQL 8.0.
- MySQL 5.7 has some configuration gotchas that might prevent dbt snapshots from working properly, due to [automatic initialization and updating for `TIMESTAMP`](https://dev.mysql.com/doc/refman/5.7/en/timestamp-initialization.html).
  - If the output of `SHOW VARIABLES LIKE 'sql_mode'` includes `NO_ZERO_DATE`, a solution is to include the following in a `*.cnf` file:
    ```
    [mysqld]
    explicit_defaults_for_timestamp = true
    sql_mode = "ALLOW_INVALID_DATES,{other_sql_modes}"
    ```
  - Where `{other_sql_modes}` is the rest of the modes from the `SHOW VARIABLES LIKE 'sql_mode'` output, as in the quick check below.
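For example, you can check whether this applies to your server before editing any config files:

```sql
-- If NO_ZERO_DATE appears in the Value column of the result,
-- apply the *.cnf change shown above and restart the server.
SHOW VARIABLES LIKE 'sql_mode';
```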
diff --git a/website/docs/reference/warehouse-setups/oracle-setup.md b/website/docs/reference/warehouse-setups/oracle-setup.md new file mode 100644 index 00000000000..dfd9ad07207 --- /dev/null +++ b/website/docs/reference/warehouse-setups/oracle-setup.md @@ -0,0 +1,498 @@ +--- +title: "Oracle setup" +id: "oracle-setup" +meta: + maintained_by: Oracle + authors: 'Oracle' + github_repo: 'oracle/dbt-oracle' + pypi_package: 'dbt-oracle' + min_core_version: 'v1.2.1' + cloud_support: Not Supported + min_supported_version: 'SQlite Version 3.0' + slack_channel_name: '#db-oracle' + slack_channel_link: 'https://getdbt.slack.com/archives/C01PWH4TXLY' + platform_name: 'Oracle' + config_page: 'no-configs' +--- + +

## Overview of {frontMatter.meta.pypi_package}

- Maintained by: {frontMatter.meta.maintained_by}
- Authors: {frontMatter.meta.authors}
- GitHub repo: {frontMatter.meta.github_repo}
- PyPI package: {frontMatter.meta.pypi_package}
- Slack channel: {frontMatter.meta.slack_channel_name}
- Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
- dbt Cloud support: {frontMatter.meta.cloud_support}
- Minimum data platform version: {frontMatter.meta.min_supported_version}

## Installation

### Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

`pip install {frontMatter.meta.pypi_package}`

Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

### Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration please refer to {frontMatter.meta.platform_name} Configuration

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
### Configure the Python driver mode

:::info
[python-oracledb](https://oracle.github.io/python-oracledb/) is the renamed, major release of Oracle's popular cx_Oracle interface
:::

[python-oracledb](https://oracle.github.io/python-oracledb/) makes it optional to install the Oracle Client libraries.
This driver supports two modes:

1. **Thin mode (preferred)**: The Python process connects directly to the Oracle database. This mode does not need the Oracle Client libraries.
2. **Thick mode**: The Python process links with the Oracle Client libraries. Some advanced Oracle database functionalities (e.g. Advanced Queuing and scrollable cursors) are currently only available via the Oracle Client libraries.

It is highly recommended to use the **thin** mode, as it vastly simplifies installation. You can configure the driver mode using the environment variable `ORA_PYTHON_DRIVER_TYPE`:

| Driver Mode            | Oracle Client libraries required? | Configuration                  |
|------------------------|-----------------------------------|--------------------------------|
| Thin                   | No                                | `ORA_PYTHON_DRIVER_TYPE=thin`  |
| Thick                  | Yes                               | `ORA_PYTHON_DRIVER_TYPE=thick` |
| cx_oracle (old driver) | Yes                               | `ORA_PYTHON_DRIVER_TYPE=cx`    |

The default value of `ORA_PYTHON_DRIVER_TYPE` is `cx`. This might change in the future as more users migrate towards the new Python driver.

```bash
export ORA_PYTHON_DRIVER_TYPE=thin
```

```bash
export ORA_PYTHON_DRIVER_TYPE=thick
# or
export ORA_PYTHON_DRIVER_TYPE=cx # default
```

### Install Oracle Instant Client libraries

In thick mode or the old cx_oracle mode, you will need the [Oracle Instant Client libraries](https://www.oracle.com/database/technologies/instant-client.html) installed. These provide the necessary network connectivity allowing dbt-oracle to access an Oracle Database instance.

Oracle Client libraries versions 21, 19, 18, 12, and 11.2 are supported where available on Linux, Windows and macOS (Intel x86). It is recommended to use the latest client possible: Oracle's standard client-server version interoperability allows connection to both older and newer databases.

1. Download an Oracle 21, 19, 18, 12, or 11.2 "Basic" or "Basic Light" zip file matching your Python 64-bit or 32-bit architecture:
   1. [x86-64 64-bit](https://www.oracle.com/database/technologies/instant-client/linux-x86-64-downloads.html)
   2. [x86 32-bit](https://www.oracle.com/database/technologies/instant-client/linux-x86-32-downloads.html)
   3. [ARM (aarch64) 64-bit](https://www.oracle.com/database/technologies/instant-client/linux-arm-aarch64-downloads.html)

2. Unzip the package into a single directory that is accessible to your application. For example:
   ```bash
   mkdir -p /opt/oracle
   cd /opt/oracle
   unzip instantclient-basic-linux.x64-21.1.0.0.0.zip
   ```

3. Install the libaio package with sudo or as the root user. For example:
   ```bash
   sudo yum install libaio
   ```
   On some Linux distributions this package is called `libaio1` instead.

4. If there is no other Oracle software on the machine that will be impacted, permanently add Instant Client to the runtime link path. For example, with sudo or as the root user:

   ```bash
   sudo sh -c "echo /opt/oracle/instantclient_21_1 > /etc/ld.so.conf.d/oracle-instantclient.conf"
   sudo ldconfig
   ```

   Alternatively, set the environment variable `LD_LIBRARY_PATH`:

   ```bash
   export LD_LIBRARY_PATH=/opt/oracle/instantclient_21_1:$LD_LIBRARY_PATH
   ```

1.
Download an Oracle 21, 19, 18, 12, or 11.2 “Basic” or “Basic Light” zip file: [64-bit](https://www.oracle.com/database/technologies/instant-client/winx64-64-downloads.html) or [32-bit](https://www.oracle.com/database/technologies/instant-client/microsoft-windows-32-downloads.html), matching your Python architecture. + +:::info Windows 7 users +Note that Oracle Client versions 21c and 19c are not supported on Windows 7. +::: + +2. Unzip the package into a directory that is accessible to your application. For example unzip `instantclient-basic-windows.x64-19.11.0.0.0dbru.zip` to `C:\oracle\instantclient_19_11`. + +3. Oracle Instant Client libraries require a Visual Studio redistributable with a 64-bit or 32-bit architecture to match Instant Client’s architecture. + 1. For Instant Client 21 install [VS 2019](https://docs.microsoft.com/en-US/cpp/windows/latest-supported-vc-redist?view=msvc-170) or later + 2. For Instant Client 19 install [VS 2017](https://docs.microsoft.com/en-US/cpp/windows/latest-supported-vc-redist?view=msvc-170) + 3. For Instant Client 18 or 12.2 install [VS 2013](https://docs.microsoft.com/en-US/cpp/windows/latest-supported-vc-redist?view=msvc-170#visual-studio-2013-vc-120) + 4. For Instant Client 12.1 install [VS 2010](https://docs.microsoft.com/en-US/cpp/windows/latest-supported-vc-redist?view=msvc-170#visual-studio-2010-vc-100-sp1-no-longer-supported) + 5. For Instant Client 11.2 install [VS 2005 64-bit](https://docs.microsoft.com/en-US/cpp/windows/latest-supported-vc-redist?view=msvc-170#visual-studio-2005-vc-80-sp1-no-longer-supported) + +4. Add the Oracle Instant Client directory to the `PATH` environment variable.The directory must occur in `PATH` before any other Oracle directories. Restart any open command prompt windows. + + ```bash + SET PATH=C:\oracle\instantclient_19_9;%PATH% + ``` + + + + + +1. Download the instant client DMG package + + ```bash + cd $HOME/Downloads + curl -O https://download.oracle.com/otn_software/mac/instantclient/198000/instantclient-basic-macos.x64-19.8.0.0.0dbru.dmg + ``` + +2. Mount the instant client DMG package + + ```bash + hdiutil mount instantclient-basic-macos.x64-19.8.0.0.0dbru.dmg + + ``` + +3. Run the install script in the mounted package + + ```bash + /Volumes/instantclient-basic-macos.x64-19.8.0.0.0dbru/install_ic.sh + ``` + +4. Unmount the package + + ```bash + hdiutil unmount /Volumes/instantclient-basic-macos.x64-19.8.0.0.0dbru + ``` + +5. The Instant Client directory will be `$HOME/Downloads/instantclient_19_8`. You could move it to some place convenient. + +6. Add links to `~/lib` or `/usr/local/lib` to enable dbt to find the libraries. + + ```bash + mkdir ~/lib + ln -s ~/instantclient_19_8/libclntsh.dylib ~/lib/ + ``` + + + + + + + + +## Configure wallet for Oracle Autonomous Database in Cloud + +dbt can connect to Oracle Autonomous Database (ADB) in Oracle Cloud using either TLS (Transport Layer Security) or mutual TLS (mTLS). TLS and mTLS provide enhanced security for authentication and encryption. +A database username and password is still required for dbt connections which can be configured as explained in the next section [Connecting to Oracle Database](#connecting-to-oracle-database). + + + + + +With TLS, dbt can connect to Oracle ADB without using a wallet. Both Thin and Thick modes of the python-oracledb driver support TLS. + +:::info +In Thick mode, dbt can connect through TLS only when using Oracle Client library versions 19.14 (or later) or 21.5 (or later). 
:::

Refer to the Oracle documentation to [connect to an ADB instance using TLS authentication](https://docs.oracle.com/en/cloud/paas/autonomous-database/adbsa/connecting-nodejs-tls.html#GUID-B3809B88-D2FB-4E08-8F9B-65A550F93A07) and the blog post [Easy wallet-less connections to Oracle Autonomous Databases in Python](https://blogs.oracle.com/opal/post/easy-way-to-connect-python-applications-to-oracle-autonomous-databases) to enable TLS for your Oracle ADB instance.

For mutual TLS connections, a wallet needs to be downloaded from the OCI console, and the Python driver needs to be configured to use it.

#### Install the Wallet and Network Configuration Files

From the Oracle Cloud console for the database, download the wallet zip file using the `DB Connection` button. The zip contains the wallet and network configuration files.

:::warning Note
Keep wallet files in a secure location and share them only with authorized users.
:::

Unzip the wallet zip file.

In Thin mode, only two files from the zip are needed:

- `tnsnames.ora` - Maps net service names used for application connection strings to your database services
- `ewallet.pem` - Enables SSL/TLS connections in Thin mode. Keep this file secure

After unzipping the files in a secure directory, set the **TNS_ADMIN** and **WALLET_LOCATION** environment variables to the directory name.

```bash
export WALLET_LOCATION=/path/to/directory_containing_ewallet.pem
export WALLET_PASSWORD=***
export TNS_ADMIN=/path/to/directory_containing_tnsnames.ora
```

Optionally, if the `ewallet.pem` file is encrypted using a wallet password, specify the password using the **WALLET_PASSWORD** environment variable.

In Thick mode, the following files from the zip are needed:

- `tnsnames.ora` - Maps net service names used for application connection strings to your database services
- `sqlnet.ora` - Configures Oracle Network settings
- `cwallet.sso` - Enables SSL/TLS connections

After unzipping the files in a secure directory, set the **TNS_ADMIN** environment variable to that directory name.

```bash
export TNS_ADMIN=/path/to/directory_containing_tnsnames.ora
```

Next, edit the `sqlnet.ora` file to point to the wallet directory.

```text
WALLET_LOCATION = (SOURCE = (METHOD = file) (METHOD_DATA = (DIRECTORY="/path/to/wallet/directory")))
SSL_SERVER_DN_MATCH=yes
```

## Connecting to Oracle Database

Define the following mandatory parameters as environment variables and refer to them in the connection profile using the [env_var](https://docs.getdbt.com/reference/dbt-jinja-functions/env_var) jinja function. Optionally, you can also define these directly in the `profiles.yml` file, but this is not recommended.

```bash
export DBT_ORACLE_USER=
export DBT_ORACLE_PASSWORD=***
export DBT_ORACLE_SCHEMA=
```

Starting with `dbt-oracle==1.0.2`, it is **optional** to set the database name:

```bash
export DBT_ORACLE_DATABASE=example_db2022adb
```

If the database name is not set, the adapter will retrieve it using the following query:

```sql
SELECT SYS_CONTEXT('userenv', 'DB_NAME') FROM DUAL
```

An Oracle connection profile for dbt can be set using any one of the following methods.

The `tnsnames.ora` file is a configuration file that contains network service names mapped to connect descriptors.
+The directory location of `tnsnames.ora` file can be specified using `TNS_ADMIN` environment variable + + + +```text +db2022adb_high = (description = + (retry_count=20)(retry_delay=3) + (address=(protocol=tcps) + (port=1522) + (host=adb.example.oraclecloud.com)) + (connect_data=(service_name=example_high.adb.oraclecloud.com)) + (security=(ssl_server_cert_dn="CN=adb.example.oraclecloud.com, + OU=Oracle BMCS US,O=Oracle Corporation,L=Redwood City,ST=California,C=US"))) +``` + + + +The TNS alias `db2022adb_high` can be defined as environment variable and referred in `profiles.yml` + +```bash +export DBT_ORACLE_TNS_NAME=db2022adb_high +``` + + + +```yaml +dbt_test: + target: dev + outputs: + dev: + type: oracle + user: "{{ env_var('DBT_ORACLE_USER') }}" + pass: "{{ env_var('DBT_ORACLE_PASSWORD') }}" + database: "{{ env_var('DBT_ORACLE_DATABASE') }}" + tns_name: "{{ env_var('DBT_ORACLE_TNS_NAME') }}" + schema: "{{ env_var('DBT_ORACLE_SCHEMA') }}" + threads: 4 +``` + + + + + +The connection string identifies which database service to connect to. It can be one of the following + +- An [Oracle Easy Connect String](https://docs.oracle.com/en/database/oracle/oracle-database/21/netag/configuring-naming-methods.html#GUID-B0437826-43C1-49EC-A94D-B650B6A4A6EE) +- An Oracle Net Connect Descriptor String +- A Net Service Name mapping to a connect descriptor + +```bash +export DBT_ORACLE_CONNECT_STRING="(description=(retry_count=20)(retry_delay=3)(address=(protocol=tcps)(port=1522) + (host=adb.example.oraclecloud.com))(connect_data=(service_name=example_high.adb.oraclecloud.com)) + (security=(ssl_server_cert_dn=\"CN=adb.example.oraclecloud.com, + OU=Oracle BMCS US,O=Oracle Corporation,L=Redwood City,ST=California,C=US\")))" +``` + + + +```yaml +dbt_test: + target: "{{ env_var('DBT_TARGET', 'dev') }}" + outputs: + dev: + type: oracle + user: "{{ env_var('DBT_ORACLE_USER') }}" + pass: "{{ env_var('DBT_ORACLE_PASSWORD') }}" + database: "{{ env_var('DBT_ORACLE_DATABASE') }}" + schema: "{{ env_var('DBT_ORACLE_SCHEMA') }}" + connection_string: "{{ env_var('DBT_ORACLE_CONNECT_STRING') }}" +``` + + + + + + +To connect using the database hostname or IP address, you need to specify the following +- host +- port (1521 or 1522) +- protocol (tcp or tcps) +- service + +```bash +export DBT_ORACLE_HOST=adb.example.oraclecloud.com +export DBT_ORACLE_SERVICE=example_high.adb.oraclecloud.com +``` + + + + +```yaml +dbt_test: + target: "{{ env_var('DBT_TARGET', 'dev') }}" + outputs: + dev: + type: oracle + user: "{{ env_var('DBT_ORACLE_USER') }}" + pass: "{{ env_var('DBT_ORACLE_PASSWORD') }}" + protocol: "tcps" + host: "{{ env_var('DBT_ORACLE_HOST') }}" + port: 1522 + service: "{{ env_var('DBT_ORACLE_SERVICE') }}" + database: "{{ env_var('DBT_ORACLE_DATABASE') }}" + schema: "{{ env_var('DBT_ORACLE_SCHEMA') }}" + retry_count: 1 + retry_delay: 3 + threads: 4 +``` + + + + + + +```yaml +dbt_test: + target: "{{ env_var('DBT_TARGET', 'dev') }}" + outputs: + dev: + type: oracle + user: "{{ env_var('DBT_ORACLE_USER') }}" + pass: "{{ env_var('DBT_ORACLE_PASSWORD') }}" + protocol: "tcps" + host: "{{ env_var('DBT_ORACLE_HOST') }}" + port: 1522 + service: "{{ env_var('DBT_ORACLE_SERVICE') }}" + database: "{{ env_var('DBT_ORACLE_DATABASE') }}" + schema: "{{ env_var('DBT_ORACLE_SCHEMA') }}" + threads: 4 +``` + + + + + + + + +## Supported Features + +- Table materialization +- View materialization +- Incremental materialization +- Seeds +- Data sources +- Singular tests +- Generic tests; Not null, Unique, Accepted values and Relationships 
+- Operations +- Analyses +- Exposures +- Document generation +- Serve project documentation as a website +- All dbt commands are supported + +## Not Supported features +- Ephemeral materialization + diff --git a/website/docs/reference/warehouse-setups/postgres-setup.md b/website/docs/reference/warehouse-setups/postgres-setup.md new file mode 100644 index 00000000000..0955f731974 --- /dev/null +++ b/website/docs/reference/warehouse-setups/postgres-setup.md @@ -0,0 +1,106 @@ +--- +title: "Postgres setup" +id: "postgres-setup" +meta: + maintained_by: dbt Labs + authors: 'core dbt maintainers' + github_repo: 'dbt-labs/dbt-core' + pypi_package: 'dbt-postgres' + min_core_version: 'v0.4.0' + cloud_support: Supported + min_supported_version: 'n/a' + slack_channel_name: '#db-postgres' + slack_channel_link: 'https://getdbt.slack.com/archives/C0172G2E273' + platform_name: 'Postgres' + config_page: 'postgres-configs' +--- + +

## Overview of {frontMatter.meta.pypi_package}

- Maintained by: {frontMatter.meta.maintained_by}
- Authors: {frontMatter.meta.authors}
- GitHub repo: {frontMatter.meta.github_repo}
- PyPI package: {frontMatter.meta.pypi_package}
- Slack channel: {frontMatter.meta.slack_channel_name}
- Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
- dbt Cloud support: {frontMatter.meta.cloud_support}
- Minimum data platform version: {frontMatter.meta.min_supported_version}

## Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

`pip install {frontMatter.meta.pypi_package}`

Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

## Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration please refer to {frontMatter.meta.platform_name} Configuration

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
## Profile Configuration

Postgres targets should be set up using the following configuration in your `profiles.yml` file.

```yaml
company-name:
  target: dev
  outputs:
    dev:
      type: postgres
      host: [hostname]
      user: [username]
      password: [password]
      port: [port]
      dbname: [database name]
      schema: [dbt schema]
      threads: [1 or more]
      [keepalives_idle](#keepalives_idle): 0 # default 0, indicating the system default. See below
      connect_timeout: 10 # default 10 seconds
      [retries](#retries): 1 # default 1 retry on error/timeout when opening connections
      [search_path](#search_path): [optional, override the default postgres search_path]
      [role](#role): [optional, set the role dbt assumes when executing queries]
      [sslmode](#sslmode): [optional, set the sslmode used to connect to the database]
```

### Configurations

#### search_path

The `search_path` config controls the Postgres "search path" that dbt configures when opening new connections to the database. By default, the Postgres search path is `"$user, public"`, meaning that unqualified names will be searched for in the `public` schema, or a schema with the same name as the logged-in user. **Note:** Setting the `search_path` to a custom value is not necessary or recommended for typical usage of dbt.

#### role

Added in v0.16.0

The `role` config controls the Postgres role that dbt assumes when opening new connections to the database.

#### sslmode

Added in v0.16.0

The `sslmode` config controls how dbt connects to Postgres databases using SSL. See [the Postgres docs](https://www.postgresql.org/docs/9.1/libpq-ssl.html) on `sslmode` for usage information. When unset, dbt will connect to databases using the Postgres default, `prefer`, as the `sslmode`.

#### keepalives_idle

If the database closes its connection while dbt is waiting for data, you may see the error `SSL SYSCALL error: EOF detected`. Lowering the [`keepalives_idle` value](https://www.postgresql.org/docs/9.3/libpq-connect.html) may prevent this, because the server will send a ping to keep the connection active more frequently.

[dbt's default setting](https://github.com/dbt-labs/dbt-core/blob/main/plugins/postgres/dbt/adapters/postgres/connections.py#L28) is 0 (the server's default value), but it can be configured lower (perhaps 120 or 60 seconds), at the cost of a chattier network connection.

#### retries

If `dbt-postgres` encounters an operational error or timeout when opening a new connection, it will retry up to the number of times configured by `retries`. The default value is 1 retry. If set to 2+ retries, dbt will wait 1 second before retrying. If set to 0, dbt will not retry at all.
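Putting these options together, a connection-hardened profile might look like the following sketch (the hostname, credentials, and lowered `keepalives_idle` value are illustrative, not recommendations):

```yaml
company-name:
  target: dev
  outputs:
    dev:
      type: postgres
      host: db.example.com        # placeholder hostname
      user: dbt_user              # placeholder username
      password: "{{ env_var('DBT_PG_PASSWORD') }}"  # hypothetical env var
      port: 5432
      dbname: analytics
      schema: dbt_dev
      threads: 4
      sslmode: require      # require an encrypted connection
      keepalives_idle: 60   # ping every 60 seconds so idle connections stay open
      connect_timeout: 10
      retries: 2            # retry twice, waiting 1 second between attempts
```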
+ + diff --git a/website/docs/reference/warehouse-profiles/redshift-profile.md b/website/docs/reference/warehouse-setups/redshift-setup.md similarity index 53% rename from website/docs/reference/warehouse-profiles/redshift-profile.md rename to website/docs/reference/warehouse-setups/redshift-setup.md index 934651f5e23..28276594ba8 100644 --- a/website/docs/reference/warehouse-profiles/redshift-profile.md +++ b/website/docs/reference/warehouse-setups/redshift-setup.md @@ -1,13 +1,47 @@ --- -title: "Redshift Profile" +title: "Redshift setup" +id: "redshift-setup" +meta: + maintained_by: dbt Labs + authors: 'core dbt maintainers' + github_repo: 'dbt-labs/dbt-redshift' + pypi_package: 'dbt-redshift' + min_core_version: 'v0.10.0' + cloud_support: Supported + min_supported_version: 'n/a' + slack_channel_name: '#db-redshift' + slack_channel_link: 'https://getdbt.slack.com/archives/C01DRQ178LQ' + platform_name: 'Redshift' + config_page: 'redshift-configs' --- -## Overview of dbt-redshift -**Maintained by:** core dbt maintainers -**Author:** dbt Labs -**Source:** [Github](https://github.com/dbt-labs/dbt-redshift) -**dbt Cloud:** Supported -**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/CJARVS0RY) +

## Overview of {frontMatter.meta.pypi_package}

- Maintained by: {frontMatter.meta.maintained_by}
- Authors: {frontMatter.meta.authors}
- GitHub repo: {frontMatter.meta.github_repo}
- PyPI package: {frontMatter.meta.pypi_package}
- Slack channel: {frontMatter.meta.slack_channel_name}
- Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
- dbt Cloud support: {frontMatter.meta.cloud_support}
- Minimum data platform version: {frontMatter.meta.min_supported_version}

## Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

`pip install {frontMatter.meta.pypi_package}`

Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

## Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration please refer to {frontMatter.meta.platform_name} Configuration

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
    ## Authentication Methods @@ -62,20 +96,22 @@ my-redshift-db: dev: type: redshift method: iam - cluster_id: [cluster_id] + cluster_id: CLUSTER_ID host: hostname.region.redshift.amazonaws.com user: alice iam_profile: data_engineer # optional iam_duration_seconds: 900 # optional autocreate: true # optional - db_groups: ['analysts'] # optional + db_groups: ['ANALYSTS'] # optional # Other Redshift configs: port: 5439 dbname: analytics schema: analytics threads: 4 - keepalives_idle: 240 # default 240 seconds + [keepalives_idle](#keepalives_idle): 240 # default 240 seconds + connect_timeout: 10 # default 10 seconds + [retries](#retries): 1 # default 1 retry on error/timeout when opening connections # search_path: public # optional, but not recommended sslmode: [optional, set the sslmode used to connect to the database (in case this parameter is set, will look for ca in ~/.postgresql/root.crt)] ra3_node: true # enables cross-database sources @@ -98,4 +134,12 @@ Where possible, dbt enables the use of `sort` and `dist` keys. See the section o ### `keepalives_idle` If the database closes its connection while dbt is waiting for data, you may see the error `SSL SYSCALL error: EOF detected`. Lowering the [`keepalives_idle` value](https://www.postgresql.org/docs/9.3/libpq-connect.html) may prevent this, because the server will send a ping to keep the connection active more frequently. -[dbt's default setting](https://github.com/dbt-labs/dbt-redshift/blob/main/dbt/adapters/redshift/connections.py#L51) is 240 (seconds), but can be configured lower (perhaps 120 or 60), at the cost of a chattier network connection. \ No newline at end of file +[dbt's default setting](https://github.com/dbt-labs/dbt-redshift/blob/main/dbt/adapters/redshift/connections.py#L51) is 240 (seconds), but can be configured lower (perhaps 120 or 60), at the cost of a chattier network connection. + + + +#### retries + +If `dbt-redshift` encounters an operational error or timeout when opening a new connection, it will retry up to the number of times configured by `retries`. If set to 2+ retries, dbt will wait 1 second before retrying. The default value is 1 retry. If set to 0, dbt will not retry at all. + + diff --git a/website/docs/reference/warehouse-setups/rockset-setup.md b/website/docs/reference/warehouse-setups/rockset-setup.md new file mode 100644 index 00000000000..c80b28a5f68 --- /dev/null +++ b/website/docs/reference/warehouse-setups/rockset-setup.md @@ -0,0 +1,83 @@ +--- +title: "Rockset setup" +id: "rockset-setup" +meta: + maintained_by: Rockset, Inc. + authors: 'Rockset, Inc.' + github_repo: 'rockset/dbt-rockset' + pypi_package: 'dbt-rockset' + min_core_version: 'v0.19.2' + cloud_support: Not Supported + min_supported_version: '?' + slack_channel_name: '#dbt-rockset' + slack_channel_link: 'https://getdbt.slack.com/archives/C02J7AZUAMN' + platform_name: 'Rockset' + config_page: 'no-configs' +--- + +:::info Vendor-supported plugin + +Certain core functionality may vary. If you would like to report a bug, request a feature, or contribute, you can check out the linked repository and open an issue. + +::: + +

## Overview of {frontMatter.meta.pypi_package}

- Maintained by: {frontMatter.meta.maintained_by}
- Authors: {frontMatter.meta.authors}
- GitHub repo: {frontMatter.meta.github_repo}
- PyPI package: {frontMatter.meta.pypi_package}
- Slack channel: {frontMatter.meta.slack_channel_name}
- Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
- dbt Cloud support: {frontMatter.meta.cloud_support}
- Minimum data platform version: {frontMatter.meta.min_supported_version}

## Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

`pip install {frontMatter.meta.pypi_package}`

Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

## Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration please refer to {frontMatter.meta.platform_name} Configuration

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}
## Connecting to Rockset with **dbt-rockset**

The dbt profile for Rockset is very simple and contains the following fields:

```yaml
rockset:
  target: dev
  outputs:
    dev:
      type: rockset
      workspace: [schema]
      api_key: [api_key]
      api_server: [api_server] # (Default is api.rs2.usw2.rockset.com)
```

### Materializations

Type | Supported? | Details
-----|------------|----------------
view | YES | Creates a [view](https://rockset.com/docs/views/#gatsby-focus-wrapper).
table | YES | Creates a [collection](https://rockset.com/docs/collections/#gatsby-focus-wrapper).
ephemeral | YES | Executes queries using CTEs.
incremental | YES | Creates a [collection](https://rockset.com/docs/collections/#gatsby-focus-wrapper) if it doesn't exist, and then writes results to it.

## Caveats
1. `unique_key` is not supported with incremental, unless it is set to [_id](https://rockset.com/docs/special-fields/#the-_id-field), which acts as a natural `unique_key` in Rockset anyway.
2. The `table` materialization is slower in Rockset than in most other warehouses, due to Rockset's architecture as a low-latency, real-time database. Creating new collections requires provisioning hot storage to index and serve fresh data, which takes about a minute.
3. Rockset queries have a two-minute timeout. Any model which runs a query that takes longer than two minutes to execute will fail.

diff --git a/website/docs/reference/warehouse-profiles/singlestore-profile.md b/website/docs/reference/warehouse-setups/singlestore-setup.md
similarity index 59%
rename from website/docs/reference/warehouse-profiles/singlestore-profile.md
rename to website/docs/reference/warehouse-setups/singlestore-setup.md
index 37d0d2da68a..d0b88c1f0ac 100644
--- a/website/docs/reference/warehouse-profiles/singlestore-profile.md
+++ b/website/docs/reference/warehouse-setups/singlestore-setup.md
@@ -1,5 +1,18 @@
---
-title: "SingleStore Profile"
+title: "SingleStore setup"
+id: "singlestore-setup"
+meta:
+  maintained_by: SingleStore, Inc.
+  authors: 'SingleStore, Inc.'
+  github_repo: 'memsql/dbt-singlestore'
+  pypi_package: 'dbt-singlestore'
+  min_core_version: 'v1.0.0'
+  cloud_support: Supported
+  min_supported_version: 'v7.5'
+  slack_channel_name: 'db-singlestore'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C02V2QHFF7U'
+  platform_name: 'SingleStore'
+  config_page: 'no-configs'
---

:::info Vendor-supported plugin

@@ -8,31 +21,35 @@ Certain core functionality may vary. If you would like to report a bug, request

:::

-## Overview of dbt-singlestore
-
-**Maintained by:** SingleStore, Inc.
-**Source:** [Github](https://github.com/memsql/dbt-singlestore)
-**Core version:** v1.0.0 and newer
-**dbt Cloud:** Not Supported
-**dbt Slack channel:** [Slack](https://getdbt.slack.com/archives/C02V2QHFF7U)
-**SingleStore version:** v7.5 and newer

    Overview of {frontMatter.meta.pypi_package}

• Maintained by: {frontMatter.meta.maintained_by}
• Authors: {frontMatter.meta.authors}
• GitHub repo: {frontMatter.meta.github_repo}
• PyPI package: {frontMatter.meta.pypi_package}
• Slack channel: {frontMatter.meta.slack_channel_name}
• Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
• dbt Cloud support: {frontMatter.meta.cloud_support}
• Minimum data platform version: {frontMatter.meta.min_supported_version}
## Installation and Distribution

-SingleStore dbt adapter is managed in its own repository, [dbt-singlestore](https://github.com/memsql/dbt-singlestore). You can use `pip` to install the SingleStore adapter:

-```zsh
-pip install dbt-singlestore
-```

    Installing {frontMatter.meta.pypi_package}

-Alternatively, you can install the package from GitHub with:
+pip is the easiest way to install the adapter:
+
-```zsh
-pip install git+https://github.com/memsql/dbt-singlestore.git
-```
+pip install {frontMatter.meta.pypi_package}

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.

-For other information including support of dbt features by SingleStore, see the [GitHub README](https://github.com/memsql/dbt-singlestore#readme).

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

### Set up a SingleStore Target

diff --git a/website/docs/reference/warehouse-profiles/snowflake-profile.md b/website/docs/reference/warehouse-setups/snowflake-setup.md
similarity index 70%
rename from website/docs/reference/warehouse-profiles/snowflake-profile.md
rename to website/docs/reference/warehouse-setups/snowflake-setup.md
index 16ab6e49811..a22f679e208 100644
--- a/website/docs/reference/warehouse-profiles/snowflake-profile.md
+++ b/website/docs/reference/warehouse-setups/snowflake-setup.md
@@ -1,13 +1,47 @@
---
-title: "Snowflake Profile"
+title: "Snowflake setup"
+id: "snowflake-setup"
+meta:
+  maintained_by: dbt Labs
+  authors: 'core dbt maintainers'
+  github_repo: 'dbt-labs/dbt-snowflake'
+  pypi_package: 'dbt-snowflake'
+  min_core_version: 'v0.8.0'
+  cloud_support: Supported
+  min_supported_version: 'n/a'
+  slack_channel_name: '#db-snowflake'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C01DRQ178LQ'
+  platform_name: 'Snowflake'
+  config_page: 'snowflake-configs'
---

-## Overview of dbt-snowflake
-**Maintained by:** core dbt maintainers
-**Author:** dbt Labs
-**Source:** [Github](https://github.com/dbt-labs/dbt-snowflake)
-**dbt Cloud:** Supported
-**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/CJN7XRF1B)

    Overview of {frontMatter.meta.pypi_package}

• Maintained by: {frontMatter.meta.maintained_by}
• Authors: {frontMatter.meta.authors}
• GitHub repo: {frontMatter.meta.github_repo}
• PyPI package: {frontMatter.meta.pypi_package}
• Slack channel: {frontMatter.meta.slack_channel_name}
• Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
• dbt Cloud support: {frontMatter.meta.cloud_support}
• Minimum data platform version: {frontMatter.meta.min_supported_version}

    Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

pip install {frontMatter.meta.pypi_package}

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

## Authentication Methods

@@ -169,18 +203,18 @@ The "base" configs for Snowflake targets are shown below. Note that you should a

| user | Yes | The user to log in as |
| database | Yes | The database that dbt should create models in |
| warehouse | Yes | The warehouse to use when building models |
-| schema | Yes | The schema to build models into by default. Can be overridden with [custom schemas](using-custom-schemas) |
+| schema | Yes | The schema to build models into by default. Can be overridden with [custom schemas](/docs/build/custom-schemas) |
| role | No (but recommended) | The role to assume when running queries as the specified user. |
-| client_session_keep_alive | No | If provided, issue a periodic `select` statement to keep the connection open when particularly long-running queries are executing (> 4 hours). Default: False (see note below) |
+| client_session_keep_alive | No | If `True`, the snowflake client will keep connections for longer than the default 4 hours. This is helpful when particularly long-running queries are executing (> 4 hours). Default: False (see [note below](#client_session_keep_alive)) |
| threads | No | The number of concurrent models dbt should build. Set this to a higher number if using a bigger warehouse. Default=1 |
| query_tag | No | A value with which to tag all queries, for later searching in [QUERY_HISTORY view](https://docs.snowflake.com/en/sql-reference/account-usage/query_history.html) |
-| retry_all | No | A boolean flag indicating whether to retry on all [Snowflake connector errors](https://github.com/snowflakedb/snowflake-connector-python/blob/master/src/snowflake/connector/errors.py) |
+| retry_all | No | A boolean flag indicating whether to retry on all [Snowflake connector errors](https://github.com/snowflakedb/snowflake-connector-python/blob/main/src/snowflake/connector/errors.py) |
| retry_on_database_errors | No | A boolean flag indicating whether to retry after encountering errors of type [snowflake.connector.errors.DatabaseError](https://github.com/snowflakedb/snowflake-connector-python/blob/ffdd6b3339aa71885878d047141fe9a77c4a4ae3/src/snowflake/connector/errors.py#L361-L364) |
| connect_retries | No | The number of times to retry after an unsuccessful connection |
| connect_timeout | No | The number of seconds to sleep between failed connection retries |

### account

-For AWS accounts in the US West default region, you can use `abc123` (without any other segments). For some AWS accounts you will have to append the region and/or cloud platform. For example, `abc123.eu-west-1.aws`. For GCP and Azure-based accounts, you have to append the region and cloud platform, such as `gcp` or `azure`, respectively. For example, `abc123.us-central1.gcp`. For details, see Snowflake's documention: "[Specifying Region Information in Your Account Hostname](https://docs.snowflake.com/en/user-guide/intro-regions.html#specifying-region-information-in-your-account-hostname)" and "[Account Identifier Formats by Cloud Platform and Region](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#account-identifier-formats-by-cloud-platform-and-region)".
+For AWS accounts in the US West default region, you can use `abc123` (without any other segments). For some AWS accounts you will have to append the region and/or cloud platform. For example, `abc123.eu-west-1` or `abc123.eu-west-2.aws`. For GCP and Azure-based accounts, you have to append the region and cloud platform, such as `gcp` or `azure`, respectively. For example, `abc123.us-central1.gcp`. For details, see Snowflake's documentation: "[Specifying Region Information in Your Account Hostname](https://docs.snowflake.com/en/user-guide/intro-regions.html#specifying-region-information-in-your-account-hostname)" and "[Account Identifier Formats by Cloud Platform and Region](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#account-identifier-formats-by-cloud-platform-and-region)".

### client_session_keep_alive

diff --git a/website/docs/reference/warehouse-profiles/spark-profile.md b/website/docs/reference/warehouse-setups/spark-setup.md
similarity index 59%
rename from website/docs/reference/warehouse-profiles/spark-profile.md
rename to website/docs/reference/warehouse-setups/spark-setup.md
index 742b370dd24..14254683637 100644
--- a/website/docs/reference/warehouse-profiles/spark-profile.md
+++ b/website/docs/reference/warehouse-setups/spark-setup.md
@@ -1,18 +1,67 @@
---
title: "Apache Spark Profile"
-id: "spark-profile"
+id: "spark-setup"
+meta:
+  maintained_by: dbt Labs
+  authors: 'core dbt maintainers'
+  github_repo: 'dbt-labs/dbt-spark'
+  pypi_package: 'dbt-spark'
+  min_core_version: 'v0.15.0'
+  cloud_support: Supported
+  min_supported_version: 'n/a'
+  slack_channel_name: 'db-databricks-and-spark'
+  slack_channel_link: 'https://getdbt.slack.com/archives/CNGCW8HKL'
+  platform_name: 'Spark'
+  config_page: 'spark-configs'
---

-## Overview of dbt-spark

    Overview of {frontMatter.meta.pypi_package}

-**Maintained by:** core dbt maintainers
-**Author:** dbt Labs
-**Source:** [Github](https://github.com/dbt-labs/dbt-spark)
-**dbt Cloud:** Supported
-**dbt Slack channel** [Link to channel](https://getdbt.slack.com/archives/CNGCW8HKL)
• Maintained by: {frontMatter.meta.maintained_by}
• Authors: {frontMatter.meta.authors}
• GitHub repo: {frontMatter.meta.github_repo}
• PyPI package: {frontMatter.meta.pypi_package}
• Slack channel: {frontMatter.meta.slack_channel_name}
• Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
• dbt Cloud support: {frontMatter.meta.cloud_support}
• Minimum data platform version: {frontMatter.meta.min_supported_version}

    Installing {frontMatter.meta.pypi_package}

-![dbt-spark stars](https://img.shields.io/github/stars/dbt-labs/dbt-spark?style=for-the-badge)
+pip is the easiest way to install the adapter:
+
+pip install {frontMatter.meta.pypi_package}

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

If connecting to Databricks via the ODBC driver, it requires `pyodbc`. Depending on your system, you can install it separately or via pip. See the [`pyodbc` wiki](https://github.com/mkleehammer/pyodbc/wiki/Install) for OS-specific installation details.

If connecting to a Spark cluster via the generic thrift or http methods, it requires `PyHive`.

```zsh
# odbc connections
$ pip install "dbt-spark[ODBC]"

# thrift or http connections
$ pip install "dbt-spark[PyHive]"
```

```zsh
# session connections
$ pip install "dbt-spark[session]"
```

    Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

## Connection Methods

@@ -60,7 +109,10 @@ your_profile_name:
      # optional
      port: [port]  # default 443
      user: [user]
-
+      server_side_parameters:
+        # cluster configuration parameters, otherwise applied via `SET` statements
+        # for example:
+        # "spark.databricks.delta.schema.autoMerge.enabled": True
 ```
@@ -86,6 +138,7 @@ your_profile_name:
      user: [user]
      auth: [e.g. KERBEROS]
      kerberos_service_name: [e.g. hive]
+      use_ssl: [true|false]  # value of hive.server2.use.SSL, default false
 ```
@@ -145,34 +198,26 @@ your_profile_name:

-## Installation and Distribution
-
-dbt's adapter for Apache Spark and Databricks is managed in its own repository, [dbt-spark](https://github.com/dbt-labs/dbt-spark). To use it,
-you must install the `dbt-spark` plugin.

-### Using pip
-The following commands will install the latest version of `dbt-spark` as well as the requisite version of `dbt-core`.
+## Optional configurations

-If connecting to Databricks via ODBC driver, it requires `pyodbc`. Depending on your system, you can install it seperately or via pip. See the [`pyodbc` wiki](https://github.com/mkleehammer/pyodbc/wiki/Install) for OS-specific installation details.
+### Retries

-If connecting to a Spark cluster via the generic thrift or http methods, it requires `PyHive`.
+Intermittent errors can crop up unexpectedly while running queries against Apache Spark. If `retry_all` is enabled, dbt-spark will naively retry any query that fails, based on the configuration supplied by `connect_timeout` and `connect_retries`. It does not attempt to determine if the query failure was transient or likely to succeed on retry. This configuration is recommended in production environments, where queries ought to be succeeding.

-```
-# odbc connections
-$ pip install "dbt-spark[ODBC]"
-```
-```
-# thrift or http connections
-$ pip install "dbt-spark[PyHive]"
-```
+For instance, this will instruct dbt to retry all failed queries up to 3 times, with a 5 second delay between each retry:

-```
-# session connections
-$ pip install "dbt-spark[session]"
+```yaml
+retry_all: true
+connect_timeout: 5
+connect_retries: 3
 ```

## Caveats

@@ -188,6 +233,4 @@ on Delta Lake (Databricks). Delta-only features:
 1. Incremental model updates by `unique_key` instead of `partition_by` (see [`merge` strategy](spark-configs#the-merge-strategy))
 2. [Snapshots](snapshots)
-
-Some dbt features, available on the core adapters, are not yet supported on Spark:
-1. [Persisting](persist_docs) column-level descriptions as database comments
+3. [Persisting](persist_docs) column-level descriptions as database comments

diff --git a/website/docs/reference/warehouse-setups/sqlite-setup.md b/website/docs/reference/warehouse-setups/sqlite-setup.md
new file mode 100644
index 00000000000..acdf1fd7e64
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/sqlite-setup.md
@@ -0,0 +1,122 @@
---
title: "SQLite setup"
id: "sqlite-setup"
meta:
  maintained_by: Community
  authors: 'Jeff Chiu (https://github.com/codeforkjeff)'
  github_repo: 'codeforkjeff/dbt-sqlite'
  pypi_package: 'dbt-sqlite'
  min_core_version: 'v1.1.0'
  cloud_support: Not Supported
  min_supported_version: 'SQLite Version 3.0'
  slack_channel_name: 'n/a'
  slack_channel_link: 'https://www.getdbt.com/community'
  platform_name: 'SQLite'
  config_page: 'no-configs'
---

:::info Community plugin

Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below.

:::

    Overview of {frontMatter.meta.pypi_package}

• Maintained by: {frontMatter.meta.maintained_by}
• Authors: {frontMatter.meta.authors}
• GitHub repo: {frontMatter.meta.github_repo}
• PyPI package: {frontMatter.meta.pypi_package}
• Slack channel: {frontMatter.meta.slack_channel_name}
• Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
• dbt Cloud support: {frontMatter.meta.cloud_support}
• Minimum data platform version: {frontMatter.meta.min_supported_version}

    Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

pip install {frontMatter.meta.pypi_package}

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

Starting with the release of dbt-core 1.0.0, versions of dbt-sqlite are aligned to the same major+minor [version](https://semver.org/) of dbt-core.
- versions 1.1.x of this adapter work with dbt-core 1.1.x
- versions 1.0.x of this adapter work with dbt-core 1.0.x
- versions 0.2.x of this adapter work with dbt 0.20.x and 0.21.x
- versions 0.1.x of this adapter work with dbt 0.19.x
- versions 0.0.x of this adapter work with dbt 0.18.x

## Connecting to SQLite with dbt-sqlite

SQLite targets should be set up using the following configuration in your `profiles.yml` file.

Example:

```yaml
your_profile_name:
  target: dev
  outputs:
    dev:
      type: sqlite
      threads: 1
      database: 'database'
      schema: 'main'
      schemas_and_paths:
        main: 'file_path/database_name.db'
      schema_directory: 'file_path'
      #optional fields
      extensions:
        - "/path/to/sqlean/crypto.so"
```

#### Description of SQLite Profile Fields

| Field | Description |
|--------------------------|--------------------------------------------------------------------------------------------------------|
| `type` | Required. Must be set to `sqlite`. |
| `threads` | Required. Must be set to `1`. SQLite locks the whole db on writes so anything > 1 won't help. |
| `database` | Required but the value is arbitrary because there is no 'database' portion of relation names in SQLite, so it gets stripped from the output of ref() and from SQL everywhere. It still needs to be set in the configuration and is used by dbt internally. |
| `schema` | Value of 'schema' must be defined in `schemas_and_paths` below. In most cases, this should be 'main'. |
| `schemas_and_paths` | Connect schemas to paths: at least one of these must be 'main' |
| `schema_directory` | Directory where all *.db files are attached as schema, using base filename as schema name, and where new schemas are created. This can overlap with the dirs of files in schemas_and_paths as long as there are no conflicts. |
| `extensions` | Optional. List of file paths of SQLite extensions to load. crypto.so is needed for snapshots to work; see SQLite Extensions below. |

## Caveats

- Schemas are implemented as attached database files. (SQLite conflates databases and schemas.)

  - SQLite automatically assigns 'main' to the file you initially connect to, so this must be defined in your profile. Other schemas defined in your profile get attached when the database connection is created.

  - If dbt needs to create a new schema, it will be created in `schema_directory` as `schema_name.db`. Dropping a schema results in dropping all its relations and detaching the database file from the session.

  - Schema names are stored in view definitions, so when you access a non-'main' database file outside dbt, you'll need to attach it using the same name, or the views won't work.

  - SQLite does not allow views in one schema (i.e. database file) to reference objects in another schema. You'll get this error from SQLite: "view [someview] cannot reference objects in database [somedatabase]". You must set `materialized='table'` in models that reference other schemas (see the sketch below).

- Materializations are simplified: they drop and re-create the model, instead of doing the backup-and-swap-in new model that the other dbt database adapters support. This choice was made because SQLite doesn't support `DROP ... CASCADE` or `ALTER VIEW` or provide information about relation dependencies in something information_schema-like. These limitations make it really difficult to make the backup-and-swap-in functionality work properly. Given how aggressively SQLite [locks](https://sqlite.org/lockingv3.html) the database anyway, it's probably not worth the effort.

## SQLite Extensions

For snapshots to work, you'll need the `crypto` module from SQLean to get an `md5()` function. It's recommended that you install all the SQLean modules, as they provide many common SQL functions missing from SQLite.

Precompiled binaries are available for download from the [SQLean github repository page](https://github.com/nalgeon/sqlean). You can also compile them yourself if you want.

Point to these module files in your profile config as shown in the example above.

Mac OS seems to ship with [SQLite libraries that do not have support for loading extensions compiled in](https://docs.python.org/3/library/sqlite3.html#f1), so this won't work "out of the box." Accordingly, snapshots won't work. If you need snapshot functionality, you'll need to compile SQLite/Python or find a Python distribution for Mac OS with this support.

diff --git a/website/docs/reference/warehouse-profiles/teradata-profile.md b/website/docs/reference/warehouse-setups/teradata-setup.md
similarity index 78%
rename from website/docs/reference/warehouse-profiles/teradata-profile.md
rename to website/docs/reference/warehouse-setups/teradata-setup.md
index 97eca8f30f5..72b137097c5 100644
--- a/website/docs/reference/warehouse-profiles/teradata-profile.md
+++ b/website/docs/reference/warehouse-setups/teradata-setup.md
@@ -1,26 +1,50 @@
---
-title: "Teradata Profile"
+title: "Teradata setup"
+id: "teradata-setup"
+meta:
+  maintained_by: Teradata
+  authors: Doug Beatty and Adam Tworkiewicz
+  github_repo: 'Teradata/dbt-teradata'
+  pypi_package: 'dbt-teradata'
+  min_core_version: 'v0.21.0'
+  cloud_support: Not Supported
+  min_supported_version: 'n/a'
+  slack_channel_name: '#db-teradata'
+  slack_channel_link: 'https://getdbt.slack.com/archives/C027B6BHMT3'
+  platform_name: 'Teradata'
+  config_page: 'teradata-configs'
---

- Some core functionality may be limited. If you're interested in contributing, check out the source code for the repository listed below.

-## Overview of dbt-teradata
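As a minimal sketch of the SQLite cross-schema caveat above: the model name and `ref` below are hypothetical, and the only essential piece is the `materialized='table'` config:

```sql
-- models/cross_schema_rollup.sql (hypothetical model name)
-- This model selects from a relation in a different attached schema, so it
-- cannot be a view in SQLite; materialize it as a table instead.
{{ config(materialized='table') }}

select
    order_date,
    count(*) as order_count
from {{ ref('stg_orders') }}  -- assume stg_orders lives in another schema (database file)
group by order_date
```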

    Overview of {frontMatter.meta.pypi_package}

-**Maintained by:** Teradata
-**Author:** Doug Beatty and Adam Tworkiewicz
-**Source:** [GitHub](https://github.com/Teradata/dbt-teradata)
-**dbt Slack channel:** [#db-teradata](https://getdbt.slack.com/archives/C027B6BHMT3)
-**dbt Cloud:** Not Supported
• Maintained by: {frontMatter.meta.maintained_by}
• Authors: {frontMatter.meta.authors}
• GitHub repo: {frontMatter.meta.github_repo}
• PyPI package: {frontMatter.meta.pypi_package}
• Slack channel: {frontMatter.meta.slack_channel_name}
• Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
• dbt Cloud support: {frontMatter.meta.cloud_support}
• Minimum data platform version: {frontMatter.meta.min_supported_version}
-The package can be installed from PyPI with:
-```
-pip install dbt-teradata
-```

    Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

pip install {frontMatter.meta.pypi_package}

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.

-For other information including Teradata feature support, see the [GitHub README](https://github.com/Teradata/dbt-teradata/blob/main/README.md).

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

### Connecting to Teradata

@@ -101,4 +125,4 @@ For the full description of the connection parameters see https://github.com/Ter

### Commands

-All dbt commands are supported.
\ No newline at end of file
+All dbt commands are supported.

diff --git a/website/docs/reference/warehouse-setups/tidb-setup.md b/website/docs/reference/warehouse-setups/tidb-setup.md
new file mode 100644
index 00000000000..d1a5f887bdf
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/tidb-setup.md
@@ -0,0 +1,131 @@
---
title: "TiDB setup"
id: "tidb-setup"
meta:
  maintained_by: PingCAP
  authors: Xiang Zhang, Qiang Wu, Yuhang Shi
  github_repo: 'pingcap/dbt-tidb'
  pypi_package: 'dbt-tidb'
  min_core_version: 'v1.0.0'
  cloud_support: Not Supported
  min_supported_version: 'n/a'
  slack_channel_name: '#db-tidb'
  slack_channel_link: 'https://getdbt.slack.com/archives/C03CC86R1NY'
  platform_name: 'TiDB'
  config_page: 'no-configs'
---

:::info Vendor-supported plugin

Some [core functionality](https://github.com/pingcap/dbt-tidb/blob/main/README.md#supported-features) may be limited.
If you're interested in contributing, check out the source code repository listed below.

:::

    Overview of {frontMatter.meta.pypi_package}

• Maintained by: {frontMatter.meta.maintained_by}
• Authors: {frontMatter.meta.authors}
• GitHub repo: {frontMatter.meta.github_repo}
• PyPI package: {frontMatter.meta.pypi_package}
• Slack channel: {frontMatter.meta.slack_channel_name}
• Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
• dbt Cloud support: {frontMatter.meta.cloud_support}
• Minimum data platform version: {frontMatter.meta.min_supported_version}

    Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

pip install {frontMatter.meta.pypi_package}

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

## Connecting to TiDB with **dbt-tidb**

### User / Password Authentication

Configure your dbt profile for using TiDB:

#### TiDB connection profile

```yaml
dbt-tidb:
  target: dev
  outputs:
    dev:
      type: tidb
      server: 127.0.0.1
      port: 4000
      schema: database_name
      username: tidb_username
      password: tidb_password

      # optional
      retries: 3  # default 1
```

#### Description of Profile Fields

| Option | Description | Required? | Example |
|----------|------------------------------------------------------|-----------|---------------------|
| type | The specific adapter to use | Required | `tidb` |
| server | The server (hostname) to connect to | Required | `yourorg.tidb.com` |
| port | The port to use | Required | `4000` |
| schema | Specify the schema (database) to build models into | Required | `analytics` |
| username | The username to use to connect to the server | Required | `dbt_admin` |
| password | The password to use for authenticating to the server | Required | `awesome_password` |
| retries | The number of retries after an unsuccessful connection | Optional | `default 1` |

## Database User Privileges

Your database user needs certain read and write privileges, such as `SELECT` and `CREATE`.
You can find some help [here](https://docs.pingcap.com/tidb/v4.0/privilege-management) with TiDB privilege management.

| Required Privilege |
|------------------------|
| SELECT |
| CREATE |
| CREATE TEMPORARY TABLE |
| CREATE VIEW |
| INSERT |
| DROP |
| SHOW DATABASE |
| SHOW VIEW |
| SUPER |

## Supported features

| TiDB 4.X | TiDB 5.0 ~ 5.2 | TiDB >= 5.3 | Feature |
|:--------:|:--------------:|:-----------:|:---------------------------:|
| ✅ | ✅ | ✅ | Table materialization |
| ✅ | ✅ | ✅ | View materialization |
| ❌ | ❌ | ✅ | Incremental materialization |
| ❌ | ✅ | ✅ | Ephemeral materialization |
| ✅ | ✅ | ✅ | Seeds |
| ✅ | ✅ | ✅ | Sources |
| ✅ | ✅ | ✅ | Custom data tests |
| ✅ | ✅ | ✅ | Docs generate |
| ❌ | ❌ | ✅ | Snapshots |
| ✅ | ✅ | ✅ | Grant |
| ✅ | ✅ | ✅ | Connection retry |

**Note:**

* TiDB 4.0 ~ 5.0 does not support [CTEs](https://docs.pingcap.com/tidb/dev/sql-statement-with), so you should avoid using `WITH` in your SQL code.
* TiDB 4.0 ~ 5.2 does not support creating a [temporary table or view](https://docs.pingcap.com/tidb/v5.2/sql-statement-create-table#:~:text=sec\)-,MySQL%20compatibility,-TiDB%20does%20not).
* TiDB 4.X does not support using SQL functions in `CREATE VIEW`, so avoid them in your SQL code.
  You can find more detail [here](https://github.com/pingcap/tidb/pull/27252).

diff --git a/website/docs/reference/warehouse-setups/trino-setup.md b/website/docs/reference/warehouse-setups/trino-setup.md
new file mode 100644
index 00000000000..7939f4d5d7d
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/trino-setup.md
@@ -0,0 +1,119 @@
---
title: "Starburst & Trino setup"
id: "trino-setup"
meta:
  maintained_by: Starburst Data, Inc.
  authors: Matthew Carter, Andy Regan, Andrew Hedengren
  github_repo: 'starburstdata/dbt-trino'
  pypi_package: 'dbt-trino'
  min_core_version: 'v0.20.0'
  cloud_support: Not Supported
  min_supported_version: 'n/a'
  slack_channel_name: '#db-presto-trino'
  slack_channel_link: 'https://getdbt.slack.com/archives/C01DRQ178LQ'
  platform_name: 'Trino'
  config_page: 'no-configs'
---

:::info Vendor-supported plugin

Certain core functionality may vary.
If you would like to report a bug, request a feature, or contribute, you can check out the linked repository and open an issue.

:::

    Overview of {frontMatter.meta.pypi_package}

• Maintained by: {frontMatter.meta.maintained_by}
• Authors: {frontMatter.meta.authors}
• GitHub repo: {frontMatter.meta.github_repo}
• PyPI package: {frontMatter.meta.pypi_package}
• Slack channel: {frontMatter.meta.slack_channel_name}
• Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
• dbt Cloud support: {frontMatter.meta.cloud_support}
• Minimum data platform version: {frontMatter.meta.min_supported_version}

    Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

pip install {frontMatter.meta.pypi_package}

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

## Set up a Trino or Starburst Target

Trino or Starburst targets should be set up using the following configuration in your `profiles.yml` file.

See all possible profile configuration options [here](#configuration).

```yaml
trino:
  target: dev
  outputs:
    dev:
      type: trino
      method: none  # optional, one of {none | ldap | kerberos | oauth | jwt | certificate}
      user: [user]
      password: [password]  # required if method is ldap or kerberos
      database: [database name]
      host: [hostname]
      port: [port number]
      schema: [your dbt schema]
      threads: [1 or more]
      retries: [1 or more]  # default: 3
      http_scheme: [http or https]
      session_properties:
        [some_session_property]: [value]  # run a SHOW SESSION query to get current session properties
```

## Incremental models

Incremental strategies supported by the adapter are:

- `append` (the default incremental strategy) only adds new records, based on the condition specified in the `is_incremental()` conditional block.
- `delete+insert` instructs dbt to use a two-step incremental approach: it first deletes the records detected through the configured `is_incremental()` block, then re-inserts them.
- `merge` makes dbt-trino construct a `MERGE` statement which inserts new records and updates existing records, based on the unique key specified by `unique_key`. If your `unique_key` is not actually unique, the `delete+insert` strategy can be used instead. Note that some connectors in Trino have limited or no support for `MERGE` (see the sketch below).

## Configuration

A dbt-trino profile can be configured to run against Trino or Starburst using the following configuration:

| Option | Description | Required? | Example |
|--------|-------------|-----------|---------|
| method | The Trino authentication method to use | Optional (default is `none`, supported methods are `ldap`, `kerberos`, `jwt`, `oauth` or `certificate`) | `none` or `kerberos` |
| user | Username for authentication | Optional (required if `method` is `none`, `ldap` or `kerberos`) | `commander` |
| password | Password for authentication | Optional (required if `method` is `ldap`) | `none` or `abc123` |
| keytab | Path to keytab for kerberos authentication | Optional (may be required if `method` is `kerberos`) | `/tmp/trino.keytab` |
| krb5_config | Path to config for kerberos authentication | Optional (may be required if `method` is `kerberos`) | `/tmp/krb5.conf` |
| principal | Principal for kerberos authentication | Optional (may be required if `method` is `kerberos`) | `trino@EXAMPLE.COM` |
| service_name | Service name for kerberos authentication | Optional (default is `trino`) | `abc123` |
| jwt_token | JWT token for authentication | Optional (required if `method` is `jwt`) | `none` or `abc123` |
| client_certificate | Path to client certificate to be used for certificate based authentication | Optional (required if `method` is `certificate`) | `/tmp/tls.crt` |
| client_private_key | Path to client private key to be used for certificate based authentication | Optional (required if `method` is `certificate`) | `/tmp/tls.key` |
| http_headers | HTTP headers to send alongside requests to Trino, specified as a yaml dictionary of (header, value) pairs. | Optional | `X-Trino-Client-Info: dbt-trino` |
| http_scheme | The HTTP scheme to use for requests to Trino | Optional (default is `http`, or `https` for `method: kerberos`, `ldap` or `jwt`) | `https` or `http` |
| cert | The full path to a certificate file for authentication with trino | Optional | |
| session_properties | Sets Trino session properties used in the connection | Optional | `query_max_run_time: 5d` |
| database | Specify the database to build models into | Required | `analytics` |
| schema | Specify the schema to build models into. Note: it is not recommended to use upper or mixed case schema names | Required | `public` |
| host | The hostname to connect to | Required | `127.0.0.1` |
| port | The port to connect to the host on | Required | `8080` |
| threads | How many threads dbt should use | Optional (default is `1`) | `8` |
| prepared_statements_enabled | Enable usage of Trino prepared statements (used in `dbt seed` commands) | Optional (default is `true`) | `true` or `false` |
| retries | Configure how many times a database operation is retried when connection issues arise | Optional (default is `3`) | |

diff --git a/website/docs/reference/warehouse-setups/vertica-setup.md b/website/docs/reference/warehouse-setups/vertica-setup.md
new file mode 100644
index 00000000000..23f5d581a71
--- /dev/null
+++ b/website/docs/reference/warehouse-setups/vertica-setup.md
@@ -0,0 +1,83 @@
---
title: "Vertica setup"
id: "vertica-setup"
meta:
  maintained_by: Community
  authors: Matthew Carter, Andy Regan, Andrew Hedengren
  github_repo: 'mpcarter/dbt-vertica'
  pypi_package: 'dbt-vertica'
  min_core_version: 'v0.21.0'
  cloud_support: Not Supported
  min_supported_version: 'Vertica 10.0'
  slack_channel_name: 'n/a'
  slack_channel_link: 'https://www.getdbt.com/community/'
  platform_name: 'Vertica'
  config_page: 'no-configs'
---

:::info Community plugin

Some core functionality may be limited. If you're interested in contributing, check out the source code for each repository listed below.

:::
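To make the dbt-trino incremental strategies above concrete, here is a minimal sketch of a `merge` model. The model, source, and column names are hypothetical; confirm your Trino connector supports `MERGE` before relying on it:

```sql
-- models/orders_incremental.sql (hypothetical model, source, and column names)
{{ config(
    materialized='incremental',
    incremental_strategy='merge',  -- or 'append' (the default) / 'delete+insert'
    unique_key='order_id'          -- must be truly unique for merge to behave correctly
) }}

select
    order_id,
    customer_id,
    order_total,
    updated_at
from {{ source('shop', 'orders') }}

{% if is_incremental() %}
  -- only process rows that changed since this model last ran
  where updated_at > (select max(updated_at) from {{ this }})
{% endif %}
```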

    Overview of {frontMatter.meta.pypi_package}

• Maintained by: {frontMatter.meta.maintained_by}
• Authors: {frontMatter.meta.authors}
• GitHub repo: {frontMatter.meta.github_repo}
• PyPI package: {frontMatter.meta.pypi_package}
• Slack channel: {frontMatter.meta.slack_channel_name}
• Supported dbt Core version: {frontMatter.meta.min_core_version} and newer
• dbt Cloud support: {frontMatter.meta.cloud_support}
• Minimum data platform version: {frontMatter.meta.min_supported_version}

    Installing {frontMatter.meta.pypi_package}

pip is the easiest way to install the adapter:

pip install {frontMatter.meta.pypi_package}

    Installing {frontMatter.meta.pypi_package} will also install dbt-core and any other dependencies.

Configuring {frontMatter.meta.pypi_package}

For {frontMatter.meta.platform_name}-specific configuration, please refer to {frontMatter.meta.platform_name} Configuration.

For further info, refer to the GitHub repository: {frontMatter.meta.github_repo}

### Connecting to Vertica with **dbt-vertica**

#### Username / password authentication

Configure your dbt profile for using Vertica:

##### Vertica connection information

```yaml
your-profile:
  outputs:
    dev:
      type: vertica # Don't change this!
      host: vertica-host-name
      port: 5433 # or your custom port (optional)
      username: your-username
      password: your-password
      database: vertica-database-name
      schema: your-default-schema
  target: dev
```

By default, `dbt-vertica` will request `ConnectionLoadBalance=true` (which is generally a good thing), and set a session label of `dbt_your-username`.

There are three options for SSL: `ssl`, `ssl_env_cafile`, and `ssl_uri`.
See their use in the code [here](https://github.com/mpcarter/dbt-vertica/blob/d15f925049dabd2833b4d88304edd216e3f654ed/dbt/adapters/vertica/connections.py#L72-L87).

diff --git a/website/docs/terms/cte.md b/website/docs/terms/cte.md
index 011ab9646a2..2489b623b7d 100644
--- a/website/docs/terms/cte.md
+++ b/website/docs/terms/cte.md
@@ -1,10 +1,15 @@
---
id: cte
title: CTE in SQL
+description: A CTE is a temporary result set that can be used in a SQL query. You can think of a CTE as a separate, smaller query within the larger query you’re building up.
displayText: CTE
hoverSnippet: A Common Table Expression (CTE) is a temporary result set that can be used in a SQL query. You can use CTEs to break up complex queries into simpler blocks of code that can connect and build on each other.
---

+CTE in SQL: Quite possibly the best thing to happen to SQL

In a formal sense, a Common Table Expression (CTE) is a temporary result set that can be used in a SQL query. You can use CTEs to break up complex queries into simpler blocks of code that can connect and build on each other. In a less formal, more human sense, you can think of a CTE as a separate, smaller query within the larger query you’re building up. Creating a CTE is essentially like making a temporary view that you can access throughout the rest of the query you are writing. There are two types of CTEs: recursive and non-recursive. This glossary focuses on non-recursive CTEs.

@@ -23,7 +28,7 @@ How can you make these complexities in your code more digestible and usable? CTE

## CTE Syntax: How it works

-To use CTEs, you begin by defining your first CTE using the `WITH` statement followed by a `SELECT`, `INSERT`, `UPDATE`, `DELETE`, or `MERGE` statement.
+To use CTEs, you begin by defining your first CTE using the `WITH` statement followed by a `SELECT` statement.

Let’s break down this example involving a `rename_columns` CTE below:

@@ -61,7 +66,7 @@ When people talk about how CTEs can simplify your queries, they specifically mea

#### Establish Structure

-In leveraging CTEs, you can break complex code into smaller segments, ultimately helping provide structure to your code. At dbt Labs, we often like to use the [import, logical, and final structure](https://docs.getdbt.com/tutorial/refactoring-legacy-sql#implement-cte-groupings) for CTEs which creates a predictable and organized structure to your dbt models.
+In leveraging CTEs, you can break complex code into smaller segments, ultimately helping provide structure to your code. At dbt Labs, we often like to use the [import, logical, and final structure](/docs/get-started/learning-more/refactoring-legacy-sql#implement-cte-groupings) for CTEs, which creates a predictable and organized structure for your dbt models (see the sketch below).
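A minimal sketch of that import/logical/final grouping, with hypothetical `stg_customers`/`stg_orders` refs and columns:

```sql
with

-- import CTEs: pull in the upstream relations this model depends on
customers as (
    select * from {{ ref('stg_customers') }}
),

orders as (
    select * from {{ ref('stg_orders') }}
),

-- logical CTEs: the actual transformation steps
customer_orders as (
    select
        customers.customer_id,
        count(orders.order_id) as order_count
    from customers
    left join orders
        on customers.customer_id = orders.customer_id
    group by 1
),

-- final CTE: the result set the model returns
final as (
    select * from customer_orders
)

select * from final
```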
#### Easily identify dependencies

@@ -158,7 +163,7 @@ A subquery is a nested query that can oftentimes be used in place

## Data warehouse support for CTEs

-CTEs are likely to be supported across most, if not all, [modern data warehouses](https://blog.getdbt.com/future-of-the-modern-data-stack/). Please use this table to see more information about using CTEs in your specific data warehouse.
+CTEs are likely to be supported across most, if not all, [modern data warehouses](https://blog.getdbt.com/future-of-the-modern-data-stack/). Please use this table to see more information about using CTEs in your specific data warehouse.

| Data Warehouse | Support CTEs? |
|---|---|

@@ -172,12 +177,11 @@

CTEs are essentially temporary views that can be used throughout a query. They are a great way to give your SQL more structure and readability, and offer simplified ways to debug your code. You can leverage appropriately-named CTEs to easily identify upstream dependencies and code functionality. CTEs also support recursiveness and reusability in the same query. Overall, CTEs can be an effective way to level up your SQL to be more organized and understandable.

-## Futher Reading
+## Further Reading

If you’re interested in reading more about CTE best practices, check out some of our favorite content around model refactoring and style:

-- [Refactoring Legacy SQL to dbt SQL](https://docs.getdbt.com/tutorial/refactoring-legacy-sql#implement-cte-groupings)
-- [dbt Best Practices](https://docs.getdbt.com/docs/guides/best-practices#break-complex-models-up-into-smaller-pieces)
+- [Refactoring Legacy SQL to dbt](/docs/get-started/learning-more/refactoring-legacy-sql#implement-cte-groupings)
- [dbt Labs Style Guide](https://github.com/dbt-labs/corp/blob/main/dbt_style_guide.md#ctes)
- [Modular Data Modeling Technique](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/)

diff --git a/website/docs/terms/dag.md b/website/docs/terms/dag.md
new file mode 100644
index 00000000000..b3cd550e580
--- /dev/null
+++ b/website/docs/terms/dag.md
@@ -0,0 +1,111 @@
---
id: dag
title: DAG
description: A DAG is a Directed Acyclic Graph, a type of graph whose nodes are directionally related to each other and don’t form a directional closed loop.
displayText: DAG
hoverSnippet: A DAG is a Directed Acyclic Graph, a type of graph whose nodes are directionally related to each other and don’t form a directional closed loop.
---

What is a DAG and why is it important? - dbt Labs

A DAG is a **D**irected **A**cyclic **G**raph, a type of graph whose nodes are directionally related to each other and don’t form a directional closed loop. In the practice of analytics engineering, DAGs are often used to visually represent the relationships between your data models.

While the concept of a DAG originated in mathematics and gained popularity in computational work, DAGs have found a home in the modern data world. They offer a great way to visualize data pipelines and lineage, and they offer an easy way to understand dependencies between data models.

## DAG use cases and best practices

DAGs are an effective tool to help you understand relationships between your data models and areas of improvement for your overall data transformations.

### Unpacking relationships and data lineage

Can you look at one of your data models today and quickly identify all the upstream and downstream models?
If you can’t, that’s probably a good sign to start building or looking at your existing DAG.

:::tip Upstream or downstream?

How do you know if a model is upstream or downstream from the model you’re currently looking at? Upstream models are models that must be built prior to the current model. In simple terms, the current model depends on upstream models in order to exist. Downstream relationships are the outputs from your current model. In a visual DAG, such as the dbt Lineage Graph, upstream models are to the left of your selected model and downstream models are to the right of your selected model. Ever confused? Use the arrows that create the directedness of a DAG to understand the direction of movement.

:::

One of the great things about DAGs is that they are *visual*. You can clearly identify the nodes that connect to each other and follow the lines of direction. When looking at a DAG, you should be able to identify where your data sources are going and where that data is potentially being referenced.

Take this mini-DAG for an example:

What can you learn from this DAG? Immediately, you may notice a handful of things:

- `stg_users` and `stg_user_groups` models are the parent models for `int_users`
- A join is happening between `stg_users` and `stg_user_groups` to form the `int_users` model
- `stg_orgs` and `int_users` are the parent models for `dim_users`
- `dim_users` is at the end of the DAG and is therefore downstream from a total of four different models

Within 10 seconds of looking at this DAG, you can quickly unpack some of the most important elements about a project: dependencies and data lineage. Obviously, this is a simplified version of the DAGs you may see in real life, but the practice of identifying relationships and data flows remains very much the same, regardless of the size of the DAG.

What happens if `stg_user_groups` just up and disappears one day? How would you know which models are potentially impacted by this change? Look at your DAG and understand model dependencies to mitigate downstream impacts.

### Auditing projects

A potentially bold statement, but there is no such thing as a perfect DAG. DAGs are special in part because they are unique to your business, data, and data models. There’s almost always room for improvement, whether that means making a CTE into its own view or performing a join earlier upstream, and your DAG can be an effective way to diagnose inefficient data models and relationships.

You can additionally use your DAG to help identify bottlenecks: long-running data models that severely impact the performance of your data pipeline. Bottlenecks can happen for multiple reasons:
- Expensive joins
- Extensive filtering or [use of window functions](https://docs.getdbt.com/blog/how-we-shaved-90-minutes-off-model)
- Complex logic stored in views
- Good old large volumes of data

...to name just a few. Understanding the factors impacting model performance can help you decide on [refactoring approaches](https://courses.getdbt.com/courses/refactoring-sql-for-modularity), [changing model materializations](https://docs.getdbt.com/blog/how-we-shaved-90-minutes-off-model#attempt-2-moving-to-an-incremental-model), replacing multiple joins with surrogate keys, or other methods.

### Modular data modeling best practices

See the DAG above? It follows a more traditional approach to data modeling where new data models are often built from raw sources instead of relying on intermediary and reusable data models.
This type of project does not scale with team or data growth. As a result, analytics engineers tend to aim to have their DAGs not look like this.

Instead, there are some key elements that can help you create a more streamlined DAG and [modular data models](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/):

- Leveraging [staging, intermediate, and mart layers](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview) to create layers of distinction between sources and transformed data
- Abstracting code that’s used across multiple models to its own model
- Joining on surrogate keys versus on multiple values

These are only a few examples of some best practices to help you organize your data models, business logic, and DAG.

:::tip Is your DAG keeping up with best practices?

Instead of manually auditing your DAG for best practices, the [dbt project evaluator package](https://github.com/dbt-labs/dbt-project-evaluator) can help audit your project and find areas of improvement.

:::

## dbt and DAGs

The marketing team at dbt Labs would be upset with us if we told you we think dbt actually stood for “dag build tool,” but one of the key elements of dbt is its ability to generate documentation and infer relationships between models. And one of the hallmark features of [dbt Docs](https://docs.getdbt.com/docs/building-a-dbt-project/documentation) is the Lineage Graph (DAG) of your dbt project.

Whether you’re using dbt Core or Cloud, dbt docs and the Lineage Graph are available to all dbt developers. The Lineage Graph in dbt Docs can show a model or source’s entire lineage, all within a visual frame. Clicking within a model, you can view the Lineage Graph and adjust selectors to only show certain models within the DAG. Analyzing the DAG here is a great way to diagnose potential inefficiencies or lack of modularity in your dbt project.

The DAG is also [available in the dbt Cloud IDE](https://www.getdbt.com/blog/on-dags-hierarchies-and-ides/), so you and your team can refer to your lineage while you build your models.

:::tip Leverage exposures

One of the newer features of dbt is [exposures](https://docs.getdbt.com/docs/build/exposures), which allow you to define downstream use of your data models outside of your dbt project *within your dbt project*. What does this mean? This means you can add key dashboards, machine learning or data science pipelines, reverse ETL syncs, or other downstream use cases to your dbt project’s DAG.

This level of interconnectivity can help boost data governance (who has access to and who [owns](https://docs.getdbt.com/reference/resource-configs/meta#designate-a-model-owner) this data) and transparency (what are the data sources and models affecting your key reports).

:::

## Conclusion

A directed acyclic graph (DAG) is a visual representation of your data models and their connections to each other. The key components of a DAG are that nodes (sources/models/exposures) are directionally linked and don’t form a closed loop. Overall, DAGs are an effective tool for understanding data lineage, dependencies, and areas of improvement in your data models.

> *Get started with [dbt today](https://www.getdbt.com/signup/) to start building your own DAG!*

## Further reading

Ready to restructure (or create your first) DAG?
Check out some of the resources below to better understand data modularity, data lineage, and how dbt helps bring it all together:

- [Data modeling techniques for more modularity](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/)
- [How we structure our dbt projects](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview)
- [How to audit your DAG](https://www.youtube.com/watch?v=5W6VrnHVkCA)
- [Refactoring legacy SQL to dbt](/docs/get-started/learning-more/refactoring-legacy-sql)

diff --git a/website/docs/terms/data-catalog.md b/website/docs/terms/data-catalog.md
new file mode 100644
index 00000000000..c618a16cbfc
--- /dev/null
+++ b/website/docs/terms/data-catalog.md
@@ -0,0 +1,87 @@
---
id: data-catalog
title: Data catalog
description: A data catalog is an inventory of data assets from different parts of the data stack within an organization. This catalog can display metadata, lineage, and business definitions from your different data sources.
displayText: data catalog
hoverSnippet: A data catalog is an inventory of data assets from different parts of the data stack within an organization. This catalog can display metadata, lineage, and business definitions from your different data sources.
---

Data catalog: a centralized place for data about your data

A data catalog is an inventory of data assets from different parts of the data stack within an organization. It helps to connect different parts of your stack by showing how your data relates to one another, all within one central location. A catalog can display metadata, lineage, and business definitions from your different data sources and allows for easy collaboration and governance. Data catalogs allow analytics engineers to properly document all parts of their data stack, making for easy ownership.

## Features of a data catalog

Data catalogs are known for six features that make them powerful for both data teams and business users:

- Access to metadata
- Business glossary functionality
- Built-in data lineage
- Collaboration support
- Enhanced data governance
- Varied integrations

### Metadata

Metadata is the data that describes data; this refers to characteristics like who created the dataset, its size, the databases and schemas it contains, and when it was last updated. It tells you where you can find a particular data source in terms of where it lives within your data warehouse. Those properties help an analytics engineer fully understand the data and its intricacies before working with it.

### Business glossary

Business glossaries within a data catalog are helpful for understanding how a dataset and its columns relate back to their specific organization. They often contain information about the business description and purpose of a dataset or model, and they display the business definitions of columns and logic within a dataset. Business glossaries are particularly helpful for knowing which dataset and column to use in your logic when writing models or defining metrics.

### Data lineage

Data lineage provides a holistic view of how data moves through an organization, where it is transformed, protected, and consumed. Lineage shows the relationships between datasets and models across different platforms within your data stack. Lineage is particularly helpful for understanding dependencies between datasets. Depending on the tool you use, catalogs can show it on a dataset or even column level.
This way, when you are updating any process within your stack, you can do so with these dependencies in mind.

### Collaboration

Data catalogs make collaboration between data and business teams easy. They allow you to see who owns which datasets from both a technical and organizational perspective. Business teams know who to talk to when data is incorrect or not fresh, while data teams know who to turn to for questions on business context. You can also see things like when a dataset was last updated and how it's used, giving you more context on your warehouse.

### Data governance

Data governance allows you to control who can access which domains within a catalog or specific datasets. Most data catalogs allow you to organize your data based on a specific area of the business and then limit user access to only their area of expertise. Catalogs also help with governing which datasets meet the data quality standards required for utilization. Many tools display a quality score and let you know when a dataset hasn’t been run in a while or is deprecated, preventing users from using unreliable data sources.

Data catalogs can also be used to identify specific datasets and columns that contain PII data. This way, teams can have a better understanding of who should and should not have access to sensitive data. Having better clarity over sensitive data will help your business stay compliant and ensure it's properly protecting customer information.

### Integrations

Data catalogs are compatible with many other tools in your modern data stack. They typically allow the documentation of your data warehouse, transformation tool, and business intelligence tool to all sit in one central location. This helps to build transparency across the stack and creates a single source of truth for the organization to depend on.

## Types of data catalogs

Like most data tools, there are two different types: an open-source data catalog and an enterprise data catalog. Let’s talk about the differences between these and the pros and cons of each.

### Open source

Open source data catalogs are free for you to use and often provide a great level of flexibility. You can build a custom solution that meets your exact needs and security requirements. But because they are free, you will be expected to manage the entire platform and set it up. Oftentimes, it’s not as simple as plugging in your various credentials to each tool in your modern data stack. It requires careful reading through the provided documentation and setting up each tool on its own, which often requires a certain threshold of technical skill. This makes for a typically more intense and potentially lengthy set-up process because there may not be experienced people to help you along the way and walk you through the steps.

Open source tools also require maintenance. Oftentimes, settings will change in the connected platforms and it's up to your team to ensure compatibility and fix any breaking changes.

In addition, with open source tools, you often need to host them yourself on a cloud provider of choice if your catalog will see broad use across the team. Depending on what you prefer, you may have to deploy multiple microservices on a platform like AWS or Azure.

Lastly, you want to keep in mind your end user: Is this data catalog meant to be utilized by the data team or the larger business teams? Business users may have a harder time navigating an open source tool because it’s usually not as easy as logging in with an account.
It requires more technical expertise to use effectively. If a business user has trouble with the catalog, it could delay important processes.
+
+### Enterprise data catalog software
+
+Enterprise data catalogs are different from open source in that they are completely managed by the company that creates them. You pay a fee to use them and are paying for the ongoing support, quick set-up process, and the minimal maintenance that comes with it. You are typically walked through it with a dedicated resource, and the integrations with external tools can be smoother because the vendor has teams dedicated to maintaining those relationships.
+
+The biggest things to keep in mind with enterprise data catalogs are your budget, use cases, and greater data culture. Can your organization afford to pay for a data catalog tool? While they require fewer engineering resources to maintain, they do come with a price tag. When considering if it is worth spending your money on an enterprise tool, make sure you consider where your business and data teams stand. Is your business at a place where it respects the initiatives put in place by the data team? Are the initiatives big enough that having a catalog would make sense?
+
+Does the data team fully understand the data and its lineage? If they don’t, it's probably too early to put this in front of business users. You want to make sure they are set up for success when being trained to use a new tool.
+
+Do you have sophisticated data models and sources that the business knows how to use? If not, it may be worth focusing on building out the right metrics and models to be used first.
+
+Is the culture data-driven? If business users are caught up in their own processes like Google spreadsheets, they may not even utilize a catalog. You don’t want to pay for a tool that is too sophisticated for where the business and data teams currently stand. Don’t rush the process.
+
+### Data catalog tools
+
+Data teams may choose to use third-party tools with data cataloging capabilities such as [Atlan](https://ask.atlan.com/hc/en-us/articles/4433673207313-How-to-set-up-dbt-Cloud), Alation, [Collibra](https://marketplace.collibra.com/listings/dbt-lineage-to-collibra-integration/), [Metaphor](https://support.metaphor.io/hc/en-us/articles/9302185081627), [Select Star](https://docs.selectstar.com/integrations/dbt/dbt-cloud), [Castor](https://docs.castordoc.com/integrations/dbt), and others. These tools often integrate directly with your data pipelines and dbt workflows and offer zoomed-in data cataloging and lineage capabilities.
+
+## Conclusion
+
+Data catalogs are a valuable asset to any data team and business as a whole. They allow people within an organization to find the data that they need when they need it and understand its quality or sensitivity. This makes communication across teams more seamless, preventing problems that impact the business in the long run. Weigh your options in terms of whether to go with open source or enterprise, trusting that the decision you land on will be best for your organization.
+
+## Additional reading
+
+- [Why bother with data cataloging?](https://www.getdbt.com/analytics-engineering/transformation/data-catalog/)
+- [Glossary: Data warehouse](/terms/data-warehouse)
\ No newline at end of file
diff --git a/website/docs/terms/data-extraction.md b/website/docs/terms/data-extraction.md
new file mode 100644
index 00000000000..9a53562c97f
--- /dev/null
+++ b/website/docs/terms/data-extraction.md
@@ -0,0 +1,78 @@
+---
+id: data-extraction
+title: Data extraction
+description: Data extraction is the process by which data is retrieved from multiple sources, often varying in volume and structure.
+displayText: data extraction
+hoverSnippet: Data extraction is the process by which data is retrieved from multiple sources, often varying in volume and structure.
+---
+
+
+ Data extraction: The first step of the ELT process
+
+
+There is no data work without data. So how do data teams get the data they need? Data extraction is the process by which data is retrieved from multiple sources, often varying in volume and structure. Most data extraction processes are followed by a loading process, where that extracted data is loaded into a central data warehouse.
+
+To actually extract said data, teams will often rely on various [ETL tools](https://docs.getdbt.com/terms/elt#elt-tools) or create custom scripts to call API endpoints. Other times data teams may be forced to do some hacky work like manually making and dropping a CSV into an S3 bucket. Definitely a rarity. But not unheard of.
+
+Come take a deep dive into data extraction, the process that allows us data folk to actually play with, well, said data.
+
+## Data extraction process: How does it work?
+
+There are two primary ways modern data teams extract data: tools and custom scripts.
+
+- **Extraction via ETL tools**: SaaS ETL tools like Fivetran, Airbyte, and more, allow data folks to select connectors or data sources and sync their required data directly to their data warehouses. These platforms reduce the need to write custom API calls to data sources and instead let data folks focus more on transforming their data once it hits their data warehouse.
+- **Extraction via custom scripts**: It’s probably inevitable, but at one point, you’re likely to find yourself hacking together a Python script to make API calls to a data source that doesn’t have a connector in an ETL tool. But let’s be real: while this is intimidating, it isn’t the end of the world. Writing and maintaining custom scripts for extracting data from data source APIs is not the most fun and there are real concerns (API limits, access tokens, lack of documentation, changing APIs, writing to external storage or directly to your data warehouse) to look out for, but gear up, read up on some basic curl requests and Python, and you've got this.
+
+The two methods above are for automated extraction: processes that you only need to set up once (in theory) to get the data you need on a regular basis. For non-automated processes, such as one-time extractions or uploads to your data warehouse, data folks can upload their data to external storage, such as S3 buckets, to load to your data warehouse, or leverage [dbt seeds](https://docs.getdbt.com/docs/building-a-dbt-project/seeds).
+
+## Commonly extracted data
+
+Obviously, the type of business you work for and the systems your team uses will determine the data you extract.
However, there are usually some common sources that data teams will extract data from for business users:
+
+- Ad platforms such as Facebook Ads, Google Ads, or Pinterest Ads
+- Accounting software like NetSuite
+- Sales CRMs such as Salesforce or HubSpot
+- Backend application databases
+- Customer service SaaS products like Zendesk or Kustomer
+
+The data that is typically extracted and loaded in your data warehouse is data that business users will need for baseline reporting, OKR measurement, or other analytics.
+
+:::tip Don’t fix what’s not broken
+As we just said, there are usually common data sources that data teams will extract from, regardless of business. Instead of writing transformations for these tables and data sources, leverage [dbt packages](https://hub.getdbt.com/) to save yourself some carpal tunnel and use the work someone else has already done for you :)
+:::
+
+## Data extraction tools
+
+If you’re not writing your own extraction scripts, you’re likely using an [ELT tool](https://docs.getdbt.com/terms/elt#elt-tools) to help you extract and load your various data sources into your data warehouse. Below, you’ll find some commonly used tools to help you do just that.
+
+| Tool | Description | Open source option? |
+|:---:|:---:|:---:|
+| Airbyte | Airbyte is an open source platform and cloud service that allows teams to create data extraction and load pipelines. | ✅ |
+| Stitch by Talend | Stitch (part of Talend) is another SaaS product that has many data connectors to extract data and load it into data warehouses. | ❌ |
+| Fivetran/HVR | Fivetran is a SaaS company that helps data teams extract, load, and perform some transformation on their data. Fivetran easily integrates with modern data warehouses and dbt. They also offer transformations that leverage dbt Core. | ❌ |
+| Funnel | Funnel is another product that can extract and load data. Funnel’s data connectors are primarily focused around marketing data sources. | ❌ |
+
+## Data extraction challenges to look out for
+
+There are some considerable challenges in data extraction, mainly around costs and viability.
+
+- **Cadence and costs**: How often does your data need to be synced or refreshed? How often will your stakeholders really be looking at the data? There can be considerable costs to hitting API endpoints or retrieving data via ELT tools depending on the cadence you set for your data extractions. Talk to your stakeholders, understand when folks would leverage fresher data, and run some basic cost-benefit analyses to understand the cadence that works for your data extractions.
+- **Viability**: Can you even extract the data your stakeholders need? As analytics engineers, your initial reaction is to check if an ETL tool has an existing connector for it. If it doesn’t, you may have to whip up a script to call the API (if there is one). If there is no API available, well, then it’s time to put on your creativity hat and get hacky!
+- **PII concerns**: Oftentimes, data teams may be interested in masking PII data before it even hits their data warehouse. This would involve masking or removing the PII data immediately after extraction and immediately prior to loading the data into your data warehouse. For folks that want to mask PII, but are okay with masking it once it’s in their data warehouse, data teams can create masking policies using dbt packages.
+
+- **Data accuracy**: This is less of a concern for data extracted via ETL tools or custom scripts, but for internal sources, such as static CSV files manually input by someone on your marketing team, you’re going to want to ensure that data is accurate (ideally before it hits your data warehouse). It's not the end of the world if inaccurate data slips through, but it's more of a nuisance than anything and something to look out for.
+
+:::tip Testing your data sources
+Using dbt, data folks can run automated tests on the raw data that is loaded into their data warehouse via [sources](https://docs.getdbt.com/docs/building-a-dbt-project/using-sources).
+:::
+
+## Conclusion
+
+Having no data extraction is the equivalent of a conductor not having an orchestra at their disposal: sad. Overall, data extraction in analytics engineering is the process of extracting data, usually via an automated ETL tool or script, from data sources that will later be loaded into a central data warehouse. There are some considerations to weigh prior to the data extraction process, such as cost, viability, and PII concerns.
+
+## Further reading
+
+Ready to take a deeper dive into all things data extraction, ELT, and dbt? Check out some of our favorite resources below:
+
+- [Glossary: ELT](https://docs.getdbt.com/terms/elt)
+- [Glossary: ETL](https://docs.getdbt.com/terms/etl)
+- [Four questions to help accurately scope analytics engineering projects](https://www.getdbt.com/blog/4-questions-to-help-you-more-accurately-scope-analytics-engineering-projects/)
+- [Five principles that will keep your data warehouse organized](https://www.getdbt.com/blog/five-principles-that-will-keep-your-data-warehouse-organized/)
diff --git a/website/docs/terms/data-lake.md b/website/docs/terms/data-lake.md
new file mode 100644
index 00000000000..e1b75a616b9
--- /dev/null
+++ b/website/docs/terms/data-lake.md
@@ -0,0 +1,112 @@
+---
+id: data-lake
+title: Data lake
+description: A data lake is a data management system used for storing large amounts of data in its raw, native form as files.
+displayText: data lake
+hoverSnippet: A data lake is a data management system used for storing large amounts of data in its raw, native form as files. Data lakes can store any type of data—structured, semi-structured, unstructured—in one centralized place.
+---
+
+
+ Data lake: an integral addition to the MDS
+
+
+A data lake is a data management system used for storing large amounts of data in its raw, native form as files. Data lakes can store any type of data—structured, semi-structured, unstructured—in one centralized place. Several common data file formats that are widely used today include CSV, JSON, XML, Parquet, and Avro. This makes the data lake a cost-effective and flexible storage container in contrast to the data warehouse, where data must be in a structured and tabular format. The primary use case of a data lake in many organizations is to serve as an initial staging area before data is ready to be transformed and aggregated in a data warehouse.
+
+## How do data lakes provide value?
+
+In the past, some organizations couldn’t store all their data in one centralized place because databases and data warehouses could only store structured, relational data. On top of that, data storage was once cost-prohibitive, so data teams had to filter and transform data volumes to smaller sizes first to be able to store them.
These challenges have been addressed by cloud data lakes; they allow for scalability, flexibility, and cost savings—all of which are handled by the cloud platform itself.
+
+### Scalability
+
+Data lakes allow you to scale your storage up or down depending on how much data you need to store at a particular point in time. With the advent of cloud data lakes, you no longer have to know and calculate upfront how much storage capacity you need. In the past, setting up a new data lake involved considerable hardware configuration tasks. Now, all of this can be achieved in a few steps by hitting a few buttons on your web browser or by typing a few lines of code on your computer.
+
+### Flexibility
+
+At times, a data team might know data from a new source could be useful, but they might not yet know how it would be used. Data lakes offer a place to store this data without needing to build a use case for structuring or shaping it first. This is different from the approach that data warehouses take, which are optimized to store and analyze relational, structured data. In addition to the data lake’s ability to store raw, uncurated data, the advent of data lake query engines (e.g., Athena, Dremio, Starburst) means that data analysts and data scientists can now perform exploratory data analysis (EDA) on top of a data lake using this layer of abstraction, without having to bring the data into the data warehouse first.
+
+### Cost-effectiveness
+
+The rise of the data lake coincided with the cloud computing revolution. Data teams no longer had to worry about making massive upfront hardware investments for data storage. Instead, you pay a usage-based fee dependent on how much data you store and how many compute queries you run.
+
+### Modern approaches
+
+As mentioned earlier, storing data in the past was an expensive endeavor, therefore organizations had to curate and think through what type of data they brought into their data warehouse. This approach is called ETL (Extract-Transform-Load), where only transformed data ultimately gets stored and analyzed in a data warehouse or data lake.
+
+The ability to store tons of data in a cost-efficient and flexible way in the data lake gave rise to a new approach to processing data, a technique that aligns with the modern practice of analytics engineering: the Extract-Load-Transform (ELT) approach. In this new process, data is immediately loaded to the destination data lake upon extraction from the source. The benefit of this approach is that it allows for flexibility and exploration of new business use cases which may or may not be known initially when data is ingested.
+
+## What are the drawbacks and challenges when using a data lake?
+
+For all of the advantages of data lakes listed above, such as cost-effectiveness and flexibility, they also come with several drawbacks and challenges.
+
+### Inability to do fast reporting
+
+Query performance and speed are areas where data warehouses typically trump data lakes. While structuring data first may seem inflexible and rigid at times, it is the right approach to implement when you have analyses and reports that you want to run frequently. The following are several query performance techniques that can only be applied to data warehouses:
+
+| Performance technique | Description | Practical scenario |
+|:---:|:---:|:---:|
+| Columnar storage | Data is physically organized in columns in a data warehouse rather than rows (in a database) or files (in a data lake) | Most analysis and reports require pulling only a subset of columns from an entire table. Columnar storage makes your queries run faster by retrieving only the relevant columns for your analysis |
+| Query caching | When a query is executed, the result is temporarily stored for a period of time | When someone executes a query on a table, the results of that table will be made available right away to the next user who executes the same query, significantly reducing computation time |
+| Data partitioning | Grouping similar data together based on selected table dimensions | Many organizations partition their tables based on a dimension that includes a date field. The reason for this is that most analyses only require pulling data on a rolling two-year period. If you want to calculate year-to-date sales this year and compare it to the same period last year, partitioning your data based on date will make your queries run faster by eliminating the need to scan through the entire table first (sketched in DDL below) |
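+
+Data partitioning in particular is easy to picture in DDL. Here's a minimal sketch using BigQuery-style syntax; the table and column names are hypothetical, and other warehouses expose the same idea through partition or cluster keys:
+
+```sql
+-- Queries that filter on ordered_at only scan the matching date partitions
+-- instead of the entire table
+create table analytics.orders
+partition by date(ordered_at) as
+select * from raw_orders;
+```
+
+With this in place, a year-over-year comparison that filters on `ordered_at` reads two slices of the table rather than every row.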
+
+That being said, storing the data in a data lake first and exploring it with an ad hoc query engine would be the recommended approach if you have a massive data set with a still-undefined use case you want to explore.
+
+### Lack of fine-grained access control
+
+It is difficult to enforce fine-grained access control on your data when it's in its raw form. Fine-grained access control pertains to granting permissions to a particular subset of your data set by restricting access to certain rows and columns. These two concepts are known as column-level security and row-level security:
+
+- **Column-level security**: A bank may want to anonymize columns that contain personally identifiable information (PII) such as credit card numbers, social security numbers, and so on. To achieve this, analytics engineers use a variety of encryption functions available in their data warehouse.
+- **Row-level security**: Imagine a retailer with a massive table containing millions of rows of sales transactions across all 50 states in the US. These companies may want to dynamically enforce limited querying permissions to end-users based on which state they’re in. For example, when an analyst based in California starts querying the table, the data set would pre-filter itself to only show sales data from California, even if the analyst attempts to query the entire table. This type of row-level data governance is typically better suited for data warehouses than data lakes (a minimal sketch follows this list).
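+
+To make the row-level idea concrete, here's a sketch of row-level security implemented as a filtered view. The `sales` and `analyst_state_map` tables are hypothetical, the `current_user()` function varies by warehouse, and most cloud warehouses also offer native row access policies that achieve the same effect more robustly:
+
+```sql
+-- Each analyst only ever sees rows for the state they are mapped to
+create view sales_filtered as
+select s.*
+from sales as s
+inner join analyst_state_map as m
+    on m.analyst_user = current_user()  -- whoever is running the query
+    and m.state = s.state;              -- restrict to their state's rows
+```
+
+An analyst based in California querying `sales_filtered` sees only California transactions, which is exactly the pre-filtering behavior described above.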
+
+## Data lake use cases
+
+Organizations use data lakes for many different reasons. Most of these reasons ultimately tie back to the three primary benefits of cost-effectiveness, scalability, and flexibility summarized earlier. Below are common use cases that data lakes enable:
+
+### Data archiving and storage
+
+Data lakes can support cost-effective archiving of historical data that is no longer being actively used. Most organizations have data retention and lifecycle policies that indicate how business data should be stored and analyzed, where it is typically organized into three tiers: Hot, Warm, and Cold storage. As an example, a company may state that the past two years’ worth of data belongs in the hot tier, data from three to five years ago belongs in the warm tier, and anything beyond that belongs in the cold tier.
+
+| Storage tier | Access pattern | Description |
+|:---:|:---:|:---:|
+| Hot | Data that is being used often | This is primarily the level at which data warehouses lie. At this level, data is highly structured and optimized for reporting and analytics. Data lakes may also lie at this tier to support machine learning and exploratory data analysis use cases |
+| Warm | Data that is infrequently accessed | At this level, data is infrequently accessed and stored at a lower cost than in the hot tier. On some occasions, data may need to be transitioned back to the hot tier, which cloud computing companies allow you to do with relative ease |
+| Cold | Data stored for archiving purposes | Data in this tier is rarely accessed. Typically, cold data must be retained for regulatory and compliance purposes on a long-term basis, if not indefinitely. |
+
+### Data science and machine learning
+
+Because of a data lake’s ability to store any type of data format, it lends itself well to advanced analytics use cases, especially those that require the use of semi-structured and unstructured data that data warehouses traditionally don’t support. Some examples include:
+
+- **Sentiment analysis**: This is a technique that uses statistics and natural language processing (NLP) algorithms to determine the emotional meaning of communications. Organizations use sentiment analysis to evaluate customer reviews, call center interactions, social media posts, and other related content, all of which require the use of unstructured data sources (e.g. free-form text, audio recordings)
+- **Predictive maintenance**: This is a common use case in the fields of manufacturing, mining, and other heavy industries. Organizations take advantage of a data lake’s ability to store machine logs, sensor and telemetry data to predict the probability of a piece of equipment failing before it happens. This enables the company to take proactive action to service the equipment, thus preventing defects and maximizing resource utilization.
+
+### Exploratory data analysis (EDA)
+
+Because you don’t need to impose a formal structure for how data is organized in a data lake, you can perform preliminary data exploration on that data, such as calculating summary statistics, discovering anomalies and outliers, and plotting data visualizations to derive preliminary insights. Commonly referred to as EDA, this is typically conducted as an initial step before formalizing a data science or machine learning use case.
+
+## Data lake vs. data warehouse
+
+| | Data lake | Data warehouse |
+|---|---|---|
+| Types of data | Structured, semi-structured, unstructured | Structured |
+| Data stored in | Folders and files in raw format | Schemas and tabular data format |
+| Schema definition | Store data in its raw format, transform the data later | Must be known upfront |
+| Intended users | Data engineers, analytics engineers, data analysts, data scientists | Analytics engineers, data analysts, business analysts |
+| Common use cases | Data archiving and storage, data science and machine learning, exploratory data analysis | Business intelligence, dashboarding, reporting and analytics |
+
+## Data platforms that support data lake workloads
+
+| Data platform | Description |
+|:---:|:---|
+| Cloudera | Cloudera Open Data Lakehouse is a platform that provides data lake flexibility and data warehouse performance in a single platform. |
+| Databricks | Databricks is a cloud-based collaborative data science, data engineering, and data analytics platform that brings the best of data warehouses and data lakes into a single unified platform. |
+| Dremio | Dremio is a data lakehouse platform built for SQL and built on open source technologies that both data engineers and data analysts love. Dremio powers BI dashboards and analytics directly on data lake storage. |
+| Snowflake | Snowflake is a fully-managed platform for data warehousing, data lakes, data engineering, data science, and data application development. |
+
+## Conclusion
+
+The data lake is the younger data management platform compared to its data warehouse counterpart. Because of its unique ability to hold large amounts of data in its native, raw format, it has allowed organizations to store all their data in a centralized place, even if they don’t yet have a definitive use case for the data. In addition, it serves as a great buffer and landing zone for data before it is ultimately transformed and aggregated in a data warehouse. Lastly, it has unlocked a world of new possibilities by enabling organizations to build data science and machine learning use cases on top of it. The data lake is an integral pillar in the Modern Data Stack and the practice of analytics engineering.
+
+## Additional reading
+- [Glossary: Data warehouse](/terms/data-warehouse)
+- [Glossary: ETL](/terms/etl)
+- [Glossary: ELT](/terms/elt)
+- [Glossary: EDW](/terms/edw)
diff --git a/website/docs/terms/data-lineage.md b/website/docs/terms/data-lineage.md
new file mode 100644
index 00000000000..61832bffe10
--- /dev/null
+++ b/website/docs/terms/data-lineage.md
@@ -0,0 +1,116 @@
+---
+id: data-lineage
+title: Data lineage
+description: Data lineage provides a holistic view of how data moves through an organization, where it’s transformed and consumed.
+displayText: data lineage
+hoverSnippet: Data lineage provides a holistic view of how data moves through an organization, where it’s transformed and consumed.
+---
+
+
+ Data lineage: What it is and how to automate it - dbt Labs
+
+
+Data lineage provides a holistic view of how data moves through an organization, where it’s transformed and consumed. Overall, data lineage is a fundamental concept to understand in the practice of analytics engineering and modern data work.
+
+At a high level, a data lineage system typically provides data teams and consumers with one or both of the following resources:
+
+- A visual graph (DAG) of sequential workflows at the data set or column level
+- A data catalog of data asset origins, owners, definitions, and policies
+
+This holistic view of the data pipeline allows data teams to build, troubleshoot, and analyze workflows more efficiently. It also enables business users to understand the origins of reporting data and provides a means for data discovery.
+
+We’ll unpack why data lineage is important, how it works in the context of analytics engineering, and where some challenges still exist for data lineage.
+
+## **Why is data lineage important?**
+
+As a data landscape grows in size and complexity, the benefits of data lineage become more apparent. For data teams, the three main advantages of data lineage are reducing root-cause analysis headaches, minimizing unexpected downstream issues when making upstream changes, and empowering business users.
+
+### **Root cause analysis**
+
+It happens: dashboards and reporting fall victim to data pipeline breaks. Data teams quickly need to diagnose what’s wrong, fix where things may be broken, and provide up-to-date numbers to their end business users. But when these breaks happen (and they surely do), how can teams quickly identify the root cause of the problem?
+
+If data teams have some form of data lineage in place, they can more easily identify the root cause of the broken pipeline or data quality issue. By tracing back through the data models, sources, and pipelines powering a dashboard or report, data teams can understand all the upstream elements impacting that work and see where the issues lie.
+
+Will data lineage or a DAG solve your breaking pipelines? Definitely not. Will it potentially make your life easier to find problems in your data work? Heck yes.
+
+### **Downstream impacts on upstream changes**
+
+You may have been here—your backend engineering team drops the `customers` table to create a newer, more accurate `users` table. The only bad thing is…[they forgot to tell the data team about the change](https://docs.getdbt.com/blog/when-backend-devs-spark-joy).
+
+When you have a data lineage system, you can visually see which downstream models, nodes, and exposures are impacted by big upstream changes such as source or model renamings or removals. Referring to your DAG or data lineage system before any significant change to your analytics work is a great way to help prevent accidental downstream issues.
+
+### **Value to business users**
+
+While data lineage makes it easier for data teams to manage pipelines, stakeholders and leaders also benefit from data lineage, primarily through the transparency it brings to data pipelines.
+
+**Shared data literacy**
+
+New hires, existing team members, and internal data practitioners can independently explore a holistic view of the data pipeline with a data lineage system. For data teams using a DAG to encapsulate their data work, business users have a clear visual representation of how data flows from different sources to the dashboards they consume in their BI tool, providing an increased level of transparency in data work. At the end of the day, the added visibility makes it easier for everyone to be on the same page.
+
+**Pipeline cleanliness**
+
+A visual graph (DAG) of how data flows through various workflows makes it easy to identify redundant loads of source system data or workflows that produce identical reporting insights.
+
+Spotlighting redundant data models can help trim down on WET (write every time/write everything twice) code and non-performant joins, and ultimately helps promote reusability, modularity, and standardization within a data pipeline.
+
+Overall, data lineage and data-driven business go hand-in-hand. A data lineage system allows data teams to be more organized and efficient, business users to be more confident, and data pipelines to be more modular.
+
+## **How does data lineage work?**
+
+In the greater data world, you may often hear of data lineage systems based on tagging, pattern matching, or parsing. In analytics engineering, however, you’ll often see data lineage implemented in a DAG or through third-party tooling that integrates into your data pipeline.
+
+### **DAGs (directed acyclic graphs)**
+
+If you use a transformation tool such as dbt that automatically infers relationships between data sources and models, a DAG automatically populates to show you the lineage that exists for your data transformations.
+
+
+
+Your DAG is used to visually show upstream dependencies, the nodes that must come before a current model, and downstream relationships, the work that is impacted by the current model. DAGs are also directional—they show a defined flow of movement and, being acyclic, never loop back on themselves.
+
+Ultimately, DAGs are an effective way to see relationships between data sources, models, and dashboards. DAGs are also a great way to see visual bottlenecks, or inefficiencies in your data work (see image below for a DAG with...many bottlenecks). Data teams can additionally add [meta fields](https://docs.getdbt.com/reference/resource-configs/meta) and documentation to nodes in the DAG to add an additional layer of governance to their dbt project.
+
+
+
+:::tip Automatic > Manual
+
+DAGs shouldn’t be dependent on manual updates. Instead, your DAG should be automatically inferred and created with your data transformations and pipelines. Leverage tools such as dbt to build your own version-controlled DAG as you develop your data models.
+
+:::
+
+### **Third-party tooling**
+
+Data teams may also choose to use third-party tools with lineage capabilities such as [Atlan](https://ask.atlan.com/hc/en-us/articles/4433673207313-How-to-set-up-dbt-Cloud), Alation, [Collibra](https://marketplace.collibra.com/listings/dbt-lineage-to-collibra-integration/), Metaphor, [Monte Carlo](https://docs.getmontecarlo.com/docs/dbt-cloud), or [Select Star](https://docs.selectstar.com/integrations/dbt/dbt-cloud). These tools often integrate directly with your data pipelines and dbt workflows and offer zoomed-in data lineage capabilities such as column-level or business logic-level lineage.
+
+## **The challenges of data lineage**
+
+The biggest challenges around data lineage become more apparent as your data, systems, and business questions grow.
+
+### **Scaling data pipelines**
+
+As dbt projects scale with data and organization growth, the number of sources, models, macros, seeds, and [exposures](https://docs.getdbt.com/docs/build/exposures) invariably grows. And with an increasing number of nodes in your DAG, it can become harder to audit your DAG for WET code or inefficiencies.
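+
+As a refresher on where all of those nodes and edges come from: dbt infers lineage from the `ref` and `source` functions in your models, so the DAG stays in sync with the code itself. A minimal sketch, with hypothetical model and source names:
+
+```sql
+-- models/marts/orders.sql
+-- dbt draws DAG edges from stg_orders and the raw payments source into this model
+select
+    orders.order_id,
+    orders.ordered_at,
+    payments.amount
+from {{ ref('stg_orders') }} as orders
+left join {{ source('stripe', 'payments') }} as payments
+    on orders.order_id = payments.order_id
+```
+
+Because every dependency is declared through `ref` or `source`, auditing the DAG is really an audit of the code, not of a manually maintained diagram.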
+
+Working with dbt projects with thousands of models and nodes can feel overwhelming, but remember: your DAG and data lineage are meant to help you, not be your enemy. Tackle DAG audits in chunks, document all models, and [leverage strong structure conventions](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview).
+
+:::tip dbt project evaluator
+
+Is your DAG keeping up with best practices? Instead of manually auditing your DAG, the [dbt project evaluator package](https://github.com/dbt-labs/dbt-project-evaluator) can help audit your project and find areas of improvement.
+
+:::
+
+### **Column-level lineage**
+
+Complex workflows also add to the difficulties a data lineage system will encounter. For example, consider the challenges in describing a data source's movement through a pipeline as it's filtered, pivoted, and joined with other tables. These challenges increase when the granularity of the data lineage shifts from the table to the column level.
+
+As data lineage graphs mature and grow, it becomes clear that column or field-level lineage is often a needed layer of specificity that is not typically built into data lineage systems. [Some of the third-party tooling](#third-party-tooling) from above can support column-level lineage.
+
+## **Conclusion**
+
+Data lineage is the holistic overview of how data moves through an organization or system, and is typically represented by a DAG. Analytics engineering practitioners use their DAG and data lineage to unpack root causes in broken pipelines, audit their models for inefficiencies, and promote greater transparency in their data work to business users. Overall, using your data lineage and DAG to know when your data is transformed and where it’s consumed is the foundation for good analytics work.
+
+## **Further reading**
+
+DAGs, data lineage, and root cause analysis…tell me more! Check out some of our favorite resources on writing modular models, DRY code, and data modeling best practices:
+
+- [Glossary: DRY](https://docs.getdbt.com/terms/dry)
+- [Data techniques for modularity](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/)
+- [How we structure our dbt projects](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview)
diff --git a/website/docs/terms/data-warehouse.md b/website/docs/terms/data-warehouse.md
new file mode 100644
index 00000000000..2c64fbaa7b7
--- /dev/null
+++ b/website/docs/terms/data-warehouse.md
@@ -0,0 +1,89 @@
+---
+id: data-warehouse
+title: Data warehouse
+description: How have data warehouses evolved over the last 40 years? Explore the nuanced changes in use case since Inmon first coined the term.
+displayText: data warehouse
+hoverSnippet: A data warehouse is a data management system used for data storage and computing that allows for analytics activities such as transforming and sharing data.
+---
+
+
+ Data warehouses in the modern data stack - dbt Labs
+
+
+A data warehouse is a data management system used for data storage and computing that allows for analytics activities such as transforming and sharing data. It helps businesses capture and store data from external sources. Analytics engineers and data analysts use it to query datasets using SQL, helping to transform them into powerful data models and reports. Data warehouses are the central source of truth for any modern data stack. Data is ingested, transformed, and shared with other tools from the warehouse.
+
+There are two main types of data warehouses — on-prem warehouses and cloud warehouses. An on-prem data warehouse is a physical location where companies need to maintain hardware and software in order to store data. A cloud data warehouse is available anywhere and doesn’t include a physical location that you need to access. In this arrangement, you pay to use the storage space and compute power that is provided and maintained by another company.
+
+## History of data warehouses
+
+While data has been stored throughout history, it wasn’t until the 1980s that technology began to accelerate and the first official data warehouse was created. It was an on-prem warehouse consisting of racks of computer processing and storage hardware that took up considerable space. As you can imagine, this caused problems: it not only consumed physical space, but employees also had to maintain the hardware and software of these warehouses. This quickly became expensive and unrealistic for smaller companies without the budget or space.
+
+When Amazon began scaling their on-prem data warehouses to support their business, they noticed an opportunity to sell compute capacity to other businesses and offset their own costs. This is when Redshift, Amazon’s cloud data warehouse product, came to be. Shortly after, other tech giants like Google and Microsoft, who were also building data infrastructure, followed suit.
+
+Now, you can be anywhere and access the power of an online warehouse. You no longer need to maintain the infrastructure yourself but can pay a company to do this for you. This is cheaper and lets you work with data faster.
+
+
+## Why businesses need data warehouses
+
+Data warehouses were once unrealistic due to the costs associated with them. Now that cloud warehouses make them available to nearly everyone, they have a plethora of benefits to offer businesses. Cloud warehouses allow for scalability, availability, cost savings, and increased security, all of which are handled by the provider itself.
+
+### Scalability
+
+Data warehouses allow you to scale computing up or down depending on how fast you need your transformations to run and how much you are willing to spend. You can turn computing resources on or off as well in order to save on costs.
+
+### Availability
+
+Data warehouses are always available. While latency may vary based on source and destination locations, your data can be accessed anywhere, at any time. This is ideal for the remote culture that we are currently living in, where anyone can work from anywhere.
+
+### Cost savings
+
+Because you no longer need to maintain all of the infrastructure, you save on costs related to maintenance. And because data warehouse companies manage so much data, they are able to unlock cost savings that you wouldn’t be able to achieve on your own.
+
+### Security
+
+Data warehouses offer advanced security features that ensure your data is always secure. They often directly handle certain compliance strategies needed with healthcare and financial data, eliminating the need for you to do this yourself. They also have features such as roles and users which help you control who has access to your data. But we will get into this more later.
+
+## Potential business use cases
+
+Businesses can leverage data warehouses for many different reasons. Most of these reasons end up saving time and money for the business, whether directly or indirectly.
+
+### Consolidating all of your data in one place
+
+First, a data warehouse acts as a single source of truth for all of your data. Rather than having all of your data spread across different platforms, it is available to you in one place. This allows you to standardize all of your core metrics and data definitions, rather than depending on the metrics calculated by platforms like Google and Facebook. If you find that different metrics don’t align across platforms, a data warehouse acts as a dependable source for the right metric. Rather than relying on external platforms, you now have one that centralizes all of your data.
+
+Not to mention, you will save your analytics engineers and data analysts a few headaches. They would otherwise have to manually pull needed data from various sources. Not having a single source of truth decreases your data quality, wastes your data team’s precious time, and makes it difficult to combine data from different sources.
+
+### Ability to control who has access and the type of access they have
+
+Data warehouses have extensive security features which allow you to control who has access to what. You have the ability to give someone as few or as many permissions as you wish. Warehouses give you the ability to create users and assign them roles. Each role has its own set of permissions governing which databases and tables it can see. Then, you can also choose who is allowed to query those tables or even update and delete them.
+
+When anyone in your organization can easily access your data, bad things can happen. You risk the potential of important data being deleted, incorrectly edited, or inappropriately accessed. Data warehouse users, roles, policies, and security measures can help ensure data is in the right hands of the right people.
+
+### Fast reporting
+
+Because all of your data is located in the same place, it allows for faster reporting compared to pulling data from many different sources. A central location allows you to quickly access and query millions of rows of data, allowing transformations and reporting to be done much faster.
+
+## Data platforms that support data warehousing workloads
+
+| **Data platform** | **Description** |
+|---|---|
+| Snowflake | Snowflake is a fully-managed platform for data warehousing, data lakes, data engineering, data science, and data application development. |
+| Databricks | Databricks is a cloud-based collaborative data science, data engineering, and data analytics platform that combines the best of data warehouses and data lakes into a lakehouse architecture. |
+| Google BigQuery | Google BigQuery is a serverless, highly scalable data warehouse that comes with a built-in query engine. |
+| Amazon Redshift | Amazon Redshift is a fully-managed petabyte-scale cloud-based data warehouse designed for large scale data set storage and analysis. |
+| Postgres | PostgreSQL is an advanced, enterprise-class open source relational database that supports both SQL (relational) and JSON (non-relational) querying. |
+
+## Data warehouse vs data lake
+
+A data lake is a system where you store, process, and query unstructured, semi-structured, and structured data at almost any scale. The main difference between a data warehouse and a data lake is the type and way data is stored. Data warehouses contain structured data that is meant to organize data for analytics use. Data lakes can contain pretty much any kind of data—structured or unstructured—and data is usually left in its raw format until it's ready to use.
Compare that to data warehouses, whose primary goal is to be a place for data teams to store both raw and transformed, usable data.
+
+## Conclusion
+
+Data warehouses have come a long way [in the last 40 years](https://www.getdbt.com/blog/future-of-the-modern-data-stack/). They have evolved from physical locations with huge costs attached into systems available to anyone, anywhere, at an affordable cost. They have the power to centralize all of your business’s data, allowing for faster analytics operations, standardized KPIs, and a single source of truth. All businesses need a data warehouse in order to operate quickly and efficiently with data that they can rely on. The question isn’t whether or not you need a data warehouse, but which data warehouse you should choose. Make a list of the key features needed for your business and use that to assess the options above.
+
+## Additional reading
+
+- [Operational analytics](https://www.getdbt.com/analytics-engineering/use-cases/operational-analytics/)
+- [Glossary: ETL](https://docs.getdbt.com/terms/etl/)
+- [Glossary: ELT](https://docs.getdbt.com/terms/elt/)
+
diff --git a/website/docs/terms/data-wrangling.md b/website/docs/terms/data-wrangling.md
new file mode 100644
index 00000000000..49d5054073b
--- /dev/null
+++ b/website/docs/terms/data-wrangling.md
@@ -0,0 +1,167 @@
+---
+id: data-wrangling
+title: Data wrangling
+description: Data wrangling describes the different processes used to transform raw data into a consistent and easily usable format. The ultimate goal of data wrangling is to work in a way that allows you to dive right into analysis on a dataset or build upon that data.
+displayText: data wrangling
+hoverSnippet: Data wrangling describes the different processes used to transform raw data into a consistent and easily usable format. The ultimate goal of data wrangling is to work in a way that allows you to dive right into analysis on a dataset or build upon that data.
+---
+
+
+ Data wrangling: the workflow that bred analytics engineers
+
+
+Data wrangling describes the different processes used to transform raw data into a consistent and easily usable format. For analytics engineers, you may know this better by the name of data cleaning. In data science or machine learning, "wrangling" often refers to prepping the data for model creation.
+
+The ultimate goal of data wrangling is to work in a way that allows you to dive right into analysis on a dataset or build upon that data in a downstream model without worrying about basic cleaning like renaming, datatype casting, etc. Data wrangling acts as preparation for the development of [intermediate, fct/dim, or mart data models](/guides/best-practices/how-we-structure/1-guide-overview) that form the base layer that other data work can be built off of. Analytics engineers tend to do data wrangling work in the staging layer as a first transformation step after loading the data. This eliminates a foundational step otherwise repeated by an analytics engineer or analyst every time they build a downstream data model or dashboard.
+
+## Data wrangling steps
+
+The structured data wrangling process includes data discovery, structuring, cleaning, enriching, validating, and publishing. While this is the general flow, there isn't one definitive workflow; it will vary depending on the transformation tool you’re using and your specific use case.
+
+### Data discovery
+
+Data discovery involves getting to know the data that you are working with. This involves looking at key statistical measures of your dataset. Some of these include:
+
+- Row count
+- Number of columns
+- Column data types
+- Distribution of column values
+- Number of duplicate rows
+- Number of nulls
+
+Oftentimes, data warehouses have a preview capability so data team members can easily see a table’s makeup (column name, type, row count, etc.), but functions such as `SUM()` and `COUNT()` will come in handy for finding these values. You can use the `GROUP BY` statement with these functions to find the counts of certain rows for different categories of data. In addition, you’ll want to identify primary keys, check for duplicates of primary keys, and ensure every row of data has a column that can act as a primary key! A sketch of this kind of profiling query follows.
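+
+Here's a minimal profiling sketch using `COUNT`; the `raw.jaffle_shop.orders` table and its columns are hypothetical:
+
+```sql
+-- How big is the table, and can order_id act as a primary key?
+select
+    count(*) as row_count,
+    count(distinct order_id) as distinct_order_ids,        -- should equal row_count
+    count(*) - count(order_status) as null_order_statuses  -- nulls to clean up later
+from raw.jaffle_shop.orders
+```
+
+If `row_count` and `distinct_order_ids` differ, you have duplicate (or null) primary keys to investigate before building anything on top of this table.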
+
+### Structuring
+
+Structuring your data is a type of transformation that involves reformatting and reorganizing your data so that it is stored in a way that makes the values usable. This could mean rearranging how the data is displayed in columns and rows. Chances are you are using an ELT tool to ingest your data, so the data is likely in a tabular format and you won’t need to do that much restructuring. If your data is structured, you really only need to worry about nested data types such as JSON. When structuring your data, you want to ask yourself these questions:
+
+- Is your data in the format you need to perform analysis on it? Does your data potentially need to be unnested? Should you combine columns into nested objects?
+- Do the column names and values look correct for your use case?
+
+If your data is not in a format that is usable, you can look into different solutions such as pivoting or using different functions to unpack lists and JSON files so that they are in a tabular format. Pivoting is helpful because it allows you to change the way your dataset is structured by rearranging the way columns, rows, and their values are displayed. dbt has a [pre-built macro](https://github.com/dbt-labs/dbt-utils/blob/main/macros/sql/pivot.sql) that makes pivoting less of a headache and more of a breeze.
+
+### Cleaning
+
+The cleaning stage involves using different functions so that the values in your data tables are usable in your models and reports. The majority of the work done in staging models is this type of cleaning, which includes:
+
+- Datatype casting
+- Lower/upper casing string values
+- Converting timestamps
+- Aliasing/column renaming
+- Removing appropriate duplicates or nulls you found in the discovery process
+- Eliminating unnecessary characters or spaces from values
+
+Certain cleaning steps, like removing rows with null values, are helpful to do at the beginning of the process because removing nulls and duplicates from the start can increase the performance of your downstream models. In the cleaning step, it’s important to follow a standard for your transformations. This means you should be following a consistent naming convention for your columns (especially for your primary keys) and casting to the same timezone and datatypes throughout your models. Examples include making sure all dates are in UTC time rather than source timezone-specific, all strings are in either lower or upper case, etc.
+
+:::tip dbt to the rescue!
+If you're struggling to do all the cleaning on your own, remember that dbt packages ([dbt expectations](https://github.com/calogica/dbt-expectations), [dbt_utils](https://hub.getdbt.com/dbt-labs/dbt_utils/latest/), and [re_data](https://www.getre.io/)) and their macros are also available to help you clean up your data.
+:::
+
+### Enriching
+
+Enriching your data means enhancing it by supplementing incomplete or missing data. This could involve basic case or coalesce statements that use an already existing column in order to produce a new column. It could also look like joining an already existing date column with a date table that contains more extensive information about a certain date. Keep in mind that you don’t want to go overboard with enriching or joining here—you only want to add what will be repeatedly used in modeling and analysis.
+
+:::tip Python for enrichment?
+With the new capability of [Python in dbt](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/python-models), will folks start using Python to help enrich their data? Only time will tell, but we’re eager to hear how you want to be using Python in dbt. Please join the [#dbt-core-python-models channel](https://www.getdbt.com/community/join-the-community/) to join in on the discussions happening around Python models.
+:::
+
+### Validating
+
+Validating data is the process of ensuring that the changes you just made to a dataset during your transformation are accurate. At this stage, you may be asking yourself:
+
+- Are the primary keys still unique? Are there the same number of primary keys in this transformed table as in my upstream sources?
+- Has the relationship with the upstream table(s) changed at all, or is it still 1-1? If not, is that expected behavior?
+- Has the distribution of column values changed at all? Are column values even correct?
+- Did I select the correct columns I want present at this stage?
+
+To answer these questions, you'll likely find yourself looking for and counting nulls, rows, duplicates, and primary keys. You'll likely reference upstream models regularly in this phase to ensure your transformation code is accurate and performing what you intended it to do.
+
+Validation is always a little manual, but [dbt tests, macros, and packages](#validating-1) can help make your data validation a little easier 😉 .
+
+### Publishing
+
+The last step of the data wrangling process is publishing. In analytics engineering, we typically refer to this as “pushing to production”. This essentially means that you are making the data models available to use in downstream data models, dashboards, and reports. This additionally means pushing the code changes for these staging models to the main branch in your git repository. For non-ephemeral models, the process of publishing could be as simple as running a query as a view, creating a table in your production schema, or running dbt Cloud in production for table recreation.
+
+CI/CD jobs are often used as part of the publishing process to test and lint code before it is pushed to production. This helps to ensure changes made are actually reliable and safe to merge. CI/CD is a best practice because it allows data models to be updated quickly and efficiently, ensuring no downstream models are impacted.
+
+When pushing to production, you want to make sure these data models are accessible by those building the models and reports. This may mean you have to play around with users, roles, and permissions in your data warehouse. Your transformation tool should have read access to these tables. Additionally, you could use dbt grants to apply these permissions directly at build time, as sketched below.
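+
+A minimal sketch of that grants approach in a dbt model config; the role names here are hypothetical:
+
+```sql
+-- models/marts/orders.sql
+-- Grants select on the built table to reporting roles at the end of each run
+{{ config(
+    materialized='table',
+    grants={'select': ['reporter', 'bi_tool_role']}
+) }}
+
+select * from {{ ref('stg_orders') }}
+```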
+
+## Data wrangling benefits
+
+Why should you spend all of that time doing relatively tedious and repetitive work? Well, there are a number of benefits that can make the slog worth it. Those benefits include:
+
+- Increased data quality
+- Increased data usability/modularity
+- More standardization
+- Deeper understanding of data
+- Potential performance improvements on downstream models
+
+### Increased data quality
+
+Data wrangling increases the overall quality of your code and the data it produces. Because the cleaning is already done and validated, you don’t have to worry about someone forgetting to clean or standardize a dataset downstream and using messy or inconsistent data.
+
+### Increased data usability/modularity
+
+Because data is wrangled once when it is ingested into the data warehouse, analytics engineers don’t need to constantly be recleaning and transforming source data from its origin, and can follow DRY practices instead. Wrangled data allows them to use clean and modular models repeatedly throughout their work.
+
+### Standardization
+
+When data is wrangled, it is matched against a standard set of conventions that your data team establishes and applies to all datasets. It ultimately creates consistent staging layers for analytics engineers to build their intermediate, fct/dim, and mart models. Data team members don’t need to worry about upholding standards in downstream models because this is already done when the data is first ingested.
+
+### Deeper understanding of data
+
+By first wrangling or cleaning data, you get to learn about the data’s intricacies in the process. Though manual, this process allows you to find issues in the data and understand them deeply before using them in downstream processes. This minimizes potential problems that can go unnoticed because you’ve already explored and validated the datasets. It also helps you understand how tables can be joined together downstream.
+
+Additionally, this initial data exploration and transformation helps you collaborate better with [backend application developers](https://docs.getdbt.com/blog/when-backend-devs-spark-joy) or data engineers to work on formatting the raw data in a format that is most appropriate for analytics work.
+
+### Potential performance improvements on downstream models
+
+Lastly, data wrangling allows for potential improvements in performance in downstream models. Because you’ve cleaned the data and potentially removed duplicates and null values, models will be quicker to run.
+
+## Data wrangling in SQL
+
+SQL is the most common language for data wrangling. While you can wrangle data in other languages, such as Python, SQL is the most straightforward choice for wrangling and transforming data in relational databases. Let’s look at some of the most common SQL functions for each of the data wrangling steps.
+
+### SQL cleaning
+
+- `CAST` is commonly used to cast values in a column to a specified data type.
+
+- `CONVERT_TZ` can be used to convert values in a column to a specific timezone.
+
+- `LOWER`/`UPPER` is used to capitalize or lowercase string values.
+
+- `TRIM` can remove leading or trailing characters in strings, making string functions easier to use downstream or more consistent across tables.
+
+- `REPLACE` replaces a specified character in column values (see the sketch after this list for several of these functions in action).
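+
+Here's what those functions might look like together in a staging model. This is a sketch: the source table and column names are hypothetical, and the exact timezone function varies by warehouse (`CONVERT_TZ` is MySQL; Snowflake has `CONVERT_TIMEZONE`, for example):
+
+```sql
+select
+    cast(order_id as integer)       as order_id,        -- datatype casting
+    lower(trim(customer_email))     as customer_email,  -- casing and whitespace cleanup
+    replace(phone_number, '-', '')  as phone_number,    -- strip unwanted characters
+    cast(ordered_at as timestamp)   as ordered_at_utc   -- consistent timestamp types
+from raw.jaffle_shop.orders
+```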
+
+You can also use custom built macros, such as those from a dbt package called [re_data](https://hub.getdbt.com/re-data/re_data/latest/), to clean columns using SQL.
+
+### Enriching
+
+Enriching data using SQL can often involve the use of functions, such as:
+
+- `CASE` statements allow you to replace values using “when-then” statements. They end with an “else” statement to catch the values that don’t fall in any of the “when-then” statements.
+- `IFNULL` replaces any null values in a column with whatever value you specify.
+- `COALESCE` returns the first non-null value from a list or column that you give it. This function is useful for replacing null values with one that you specify or coalescing multiple column values together.
+
+### Structuring
+
+Pivot tables come in handy when restructuring your data. You can use them to make your column names your values and vice versa. dbt has a [macro](https://github.com/dbt-labs/dbt-utils/blob/main/macros/sql/pivot.sql) built out that allows you to completely customize and pivot your tables without having to write crazy complicated code.
+
+For nested data types such as JSON, you’ll want to check out the JSON parsing and extraction functions of your data warehouse to help work with this data.
+
+### Validating
+
+dbt offers [generic tests](https://docs.getdbt.com/docs/building-a-dbt-project/tests#more-generic-tests) in every dbt project that allow you to validate accepted values, uniqueness, and null values. They also allow you to validate the relationships between tables and that the primary key is unique.
+
+If you can’t find what you need with the generic tests, you can download an additional dbt testing package called [dbt_expectations](https://hub.getdbt.com/calogica/dbt_expectations/0.1.2/) that dives even deeper into how you can test the values in your columns. This package has useful tests like `expect_column_values_to_be_in_type_list`, `expect_column_values_to_be_between`, and `expect_column_value_lengths_to_equal`.
+
+## Conclusion
+
+You could argue that data wrangling is one of the most important parts of an analytics engineer's job. It increases data quality, makes your data usable, standardizes it, increases your understanding, and improves performance. None of this would be possible without the data discovery, structuring, cleaning, enriching, validating, and publishing steps that make up the wrangling process.
+
+## Further reading
+
+- [Our favorite SQL functions](https://www.getdbt.com/sql-foundations/top-sql-functions/)
+- [Glossary: Data warehouse](/terms/data-warehouse)
+- [Glossary: Primary key](/terms/primary-key)
+- [Glossary: JSON](/terms/json)
\ No newline at end of file
diff --git a/website/docs/terms/dataframe.md b/website/docs/terms/dataframe.md
new file mode 100644
index 00000000000..8981c8e4648
--- /dev/null
+++ b/website/docs/terms/dataframe.md
@@ -0,0 +1,107 @@
+---
+id: dataframe
+title: DataFrame
+description: A DataFrame is a way of storing and manipulating tabular data in Python. They gained popularity first as a part of R and then as a part of pandas.
+displayText: dataframe
+hoverSnippet: A DataFrame is a two-dimensional data structure (rows and columns). It's the most common way of representing and interacting with large datasets in Python.
+---
+
+ What is a DataFrame in Python? - dbt Labs 
+
+
+A DataFrame is a way of storing and manipulating tabular data in Python. DataFrames are often likened to tables with columns and rows that you could find in any <Term id="data-warehouse" />, Google Sheet, or Excel workbook.
+
+A DataFrame entry in an analytics engineering glossary…what is happening? You’re reading this right. While SQL is the go-to programming language for most analytics engineering work, there are inevitably situations where you'll find yourself writing some Python and using DataFrames.
+
+While DataFrames are also used in other languages for data processing, such as R and Scala, the focus of this glossary page will be on Python DataFrames, their use cases, and their relation to analytics engineering work.
+
+## How DataFrames work
+
+DataFrames have a long history ([going back to 1990](https://towardsdatascience.com/preventing-the-death-of-the-dataframe-8bca1c0f83c8#:~:text=The%20earliest%20%E2%80%9Cdataframe%E2%80%9D%2C%20originally,Hastie%20in%201992%20%5B1%5D)!), but gained popularity first as a part of R and then as a part of [pandas](https://pandas.pydata.org/), an open source Python library of useful data analysis and manipulation tools. To work with DataFrames in Python, folks typically need to import the pandas library in the beginning of their script, `.py` file, or Python notebook with the conventional `import pandas as pd`.
+
+One of the strengths of DataFrames lies in their ability to take data in its original form (ex. array, list, <Term id="json" />, parquet, dictionary) and form a tabular (rows and columns) format out of it. Once this data is in a tabular format, you can apply functions and packages to that data to clean, transform, and enrich it.
+
+Below is an example creation of a Python DataFrame from a list and some light enrichment on it:
+
+```python
+import pandas as pd
+
+def is_credit_card_purchase(x):
+    if x == 'credit_card':
+        return True
+    else:
+        return False
+
+jaffle_shop_orders = [[1, 1, 'credit_card', 1000], [2, 2, 'credit_card', 2000], [3, 3, 'coupon', 100]]
+orders_df = pd.DataFrame(jaffle_shop_orders, columns=['unique_id', 'order_id', 'payment_method', 'amount'])
+orders_df.set_index(['unique_id'], inplace=True)
+# flag orders that were paid for by credit card
+orders_df['is_credit_card'] = orders_df['payment_method'].apply(is_credit_card_purchase)
+
+print(orders_df)
+```
+
+This script will return an `orders_df` DataFrame that looks like this:
+
+| unique_id | order_id | payment_method | amount | is_credit_card |
+|---|---|---|---|---|
+| 1 | 1 | credit_card | 1000 | True |
+| 2 | 2 | credit_card | 2000 | True |
+| 3 | 3 | coupon | 100 | False |
+
+:::info A note on Python flavors
+If you’re running Python in Snowflake via Snowpark, you would typically be working with [Snowpark DataFrames](https://docs.snowflake.com/en/developer-guide/snowpark/python/working-with-dataframes.html) or pandas DataFrames. Folks running Python from Google BigQuery or Databricks can use either pandas or [PySpark DataFrames](https://docs.databricks.com/spark/latest/dataframes-datasets/introduction-to-dataframes-python.html). There might be slight syntax differences between the different Python flavors of Snowpark, PySpark, and pandas, but much of the functionality remains the same.
+:::
+
+It's also possible and common practice to string together a number of DataFrame transformations. For example, if `df` represents a DataFrame containing one row per person living in the Eastern United States over the last decade, you can calculate the number of people living in Philadelphia each year (shown here with PySpark syntax):
+
+```python
+from pyspark.sql.functions import count
+
+philly_population_by_year = (
+    df.filter(df.city == "Philadelphia")
+    .groupBy("year")
+    .agg(count("name").alias("population"))
+)
+```
+
+In most distributed frameworks, these transformations are evaluated "lazily."
Rather than performing each transformation, calculating its results, and storing those results, the framework develops a *plan* for how it *will* perform those calculations. When you want to *act* on the transformed DataFrame—see the top 10 results, or write it back to a table in the database—then the framework's optimizer calculates the most efficient way to deliver those results, based on all the steps you have defined.
+
+If you're familiar with SQL, you can think of a DataFrame like a `select` statement, and each new DataFrame operation as a separate <Term id="cte" />.
+
+You can write a long SQL query containing many complex CTEs. When you run the query with `limit 10` to see a sample of its results, or create that query as a table in the database (what dbt does when it runs your model), the data warehouse optimizes your query and produces the results in the most efficient way possible.
+
+## DataFrame use cases
+
+You could probably write hundreds of pages on DataFrame use cases and examples, but at their core, DataFrames, *in the context of analytics engineering*, are often used to manipulate data outside of SQL capabilities, work with data during API extraction, and leverage data science and machine learning.
+
+### Enrichment and manipulation of data outside of SQL capabilities
+
+Let’s just say it: there are a lot of things you can do in Python that you could do in SQL and vice versa, but Python packages typically win out when it comes to data enrichment. A typical use case for Python DataFrames is the ability to apply Python libraries or functions to data in the DataFrame.
+
+In practice, this could look like applying an [IP parser](https://pypi.org/project/ipparser/) to an IP address column, using a package to determine whether a [date falls on a holiday](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/python-models#using-pypi-packages), or leveraging [numpy](https://numpy.org/) for performant and complex mathematical computations.
+
+:::tip dbt x Python DataFrames
+With v1.3, dbt now supports the use of beta [Python models in dbt](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/python-models). What does this mean exactly? This means that Python-defined data transformations can be created and used in a dbt project in the same vein as a classic dbt SQL model. These Python models are incredibly new and the team is eagerly looking for feedback on how folks want to use and ritualize them.
+:::
+
+### Manipulation of data during extraction and loading scripts
+
+It’s not the most pleasant of experiences, but as an analytics engineer, you’re going to find yourself at some point writing a hacky Python script to extract data from a system or API that doesn’t have an innate connector in an [ETL tool](https://docs.getdbt.com/terms/elt#elt-tools).
+
+As you unpack and unnest the JSON received from these API endpoints, you’ll likely use DataFrames to make your data (and life) a little easier to work with. We won’t go into great depth here since this probably won’t happen too often in your career as an analytics engineer, but it’s beneficial to understand the basics of DataFrames and working with [requests, JSON, and DataFrames](https://stackoverflow.com/questions/42518864/convert-json-data-from-request-into-pandas-dataframe).
+
+### Data science and machine learning
+
+If SQL is an analytics engineer’s oven, Python is a data scientist's stovetop.
Data scientists and machine learning engineers often use Python and DataFrames to perform exploratory analysis, feature engineering and data preparation, and the application of models and algorithms on datasets. Understanding and using DataFrames is step 1 (of many steps) to becoming a data person who can create meaningful data science and machine learning models.
+
+All this data science and machine learning talk…“But, I’m an analytics engineer,” you say adamantly. One of the great, beautiful, and sometimes frustrating qualities about analytics engineers is their jack-of-all-trades-ness. You can transform data in your sleep, talk ROI and CPAs all day with your VP of marketing, and use git like you studied computer science in college—what can’t you do?? You’ve probably experimented with a predictive analytics model, some light forecasting, or sentiment analysis at one point in your data journey. You may not be interested in making the conversion to a full-fledged data scientist or machine learning engineer, but you may enjoy a challenge from time to time.
+
+There’s a reason data warehouses and platforms like Snowflake, BigQuery, and Databricks are providing support for Python: because folks are asking for it. There are endless use cases for Python and DataFrames that fall outside of data science and machine learning work, but as you start working and feeling more comfortable in Python, you may be tempted to start experimenting with these different forms of data work. And the world’s your oyster, right?
+
+## Conclusion
+
+A DataFrame is a tabular data storage format in Python that is widely used across different roles in the data world. Since a DataFrame stores data in rows and columns, similar to how analytics engineers manipulate tables stored in data warehouses, data folks can transform, engineer, and enrich data in DataFrames using Python and Python packages. Analytics engineers may find themselves using DataFrames when they’re extracting data via APIs, enriching data with third-party packages, or experimenting with data science and machine learning models.
+
+## Further reading
+
+Are you ready to dip your toes in DataFrames, Python, and dbt? Check out some of the resources below to learn more about how dbt is embracing Python:
+
+- [Python models in dbt](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/python-models)
+- #beta-feedback-python-models Slack channel in the [dbt Community Slack](https://www.getdbt.com/community/join-the-community/)
+- [Best practices for developing Python models in dbt discussion](https://github.com/dbt-labs/docs.getdbt.com/discussions/1811)
\ No newline at end of file
diff --git a/website/docs/terms/ddl.md b/website/docs/terms/ddl.md
index b31ea97235b..c4324e75fa9 100644
--- a/website/docs/terms/ddl.md
+++ b/website/docs/terms/ddl.md
@@ -1,11 +1,15 @@
 ---
 id: ddl
 title: DDL
+description: Data Definition Language (DDL) is a group of SQL statements that you can execute to manage database objects, including tables, views, and more.
 displayText: DDL
 hoverSnippet: Data Definition Language (DDL) is a group of SQL statements that you can execute to manage database objects, including tables, views, and more.
-cta: staging
 ---
+
+ What is Data Definition Language (DDL) in SQL? 
+
+
 Data Definition Language (DDL) is a group of SQL statements that you can execute to manage database objects, including tables, views, and more. Using DDL statements, you can perform powerful commands in your database such as creating, modifying, and dropping objects.
DDL commands are usually executed in a SQL browser or stored procedure.

DDL is contrasted with Data Manipulation Language (DML) which is the SQL that is used to actually access and manipulate data in database objects. The majority of data analysts will rarely execute DDL commands and will do the majority of their work creating DML statements to model and analyze data.
@@ -49,7 +53,7 @@ In this example, you have to rename the `last_name` column [in jaffle_shop’s](

### DROP

-The `DROP` command. Probably the most high-stakes DDL statement one can execute. One that should be used with the *utmost* of care. At its core, an executed `DROP` statement will remove that object from the data warehouse. You can drop tables, views, schemas, databases, users, functions, and more.
+The `DROP` command. Probably the most high-stakes DDL statement one can execute. One that should be used with the *utmost* of care. At its core, an executed `DROP` statement will remove that object from the <Term id="data-warehouse" />. You can drop tables, views, schemas, databases, users, functions, and more.

Some data warehouses such as Snowflake allow you to add restrictions to `DROP` statements to caution you about the impact of dropping a table, view, or schema before it’s actually dropped. In practice, we recommend you never drop raw source tables as they are often your baseline of truth. Your database user also usually needs the correct permissions to drop database objects.
diff --git a/website/docs/terms/dimensional-modeling.md b/website/docs/terms/dimensional-modeling.md
new file mode 100644
index 00000000000..f54f601b7d3
--- /dev/null
+++ b/website/docs/terms/dimensional-modeling.md
@@ -0,0 +1,159 @@
+---
+id: dimensional-modeling
+title: Dimensional modeling
+description: Dimensional modeling is a data modeling technique where you break data up into “facts” and “dimensions” to organize and describe entities in your data warehouse
+displayText: dimensional modeling
+hoverSnippet: Dimensional modeling is a data modeling technique where you break data up into “facts” and “dimensions” to organize and describe entities within your data warehouse.
+---
+
+ Dimensional modeling: An essential concept in data modeling 
+
+
+Dimensional modeling is a data modeling technique where you break data up into “facts” and “dimensions” to organize and describe entities within your data warehouse. The result is a staging layer in the data warehouse that cleans and organizes the data for the business end of the warehouse, where it's more accessible to data consumers.
+
+By breaking your data down into clearly defined and organized entities, your consumers can make sense of what that data is, what it’s used for, and how to join it with new or additional data. Ultimately, using dimensional modeling for your data can help create the appropriate layer of models to expose in an end business intelligence (BI) tool.
+
+There are a few different methodologies for dimensional modeling that have evolved over the years. The big hitters are the Kimball methodology and the Inmon methodology. Ralph Kimball’s work formed much of the foundation for how data teams approached data management and data modeling. Here, we’ll focus on dimensional modeling from Kimball’s perspective—why it exists, where it drives value for teams, and how it’s evolved in recent years.
+
+## What are we trying to do here?
+
+Let’s take a step back for a second and ask ourselves: why should you read this glossary page? What are you trying to accomplish with dimensional modeling and data modeling in general?
Why have you taken up this rewarding, but challenging career? Why are *you* here?
+
+This may come as a surprise to you, but we’re not trying to build a top-notch foundation for analytics—we’re actually trying to build a bakery.
+
+Not the answer you expected? Well, let’s open up our minds a bit and explore this analogy.
+
+If you run a bakery (and we’d be interested in seeing the data person + baker venn diagram), you may not realize you’re doing a form of dimensional modeling. What’s the final output from a bakery? It’s that glittering, glass display of delicious-looking cupcakes, cakes, cookies, and everything in between. But a cupcake didn’t just magically appear in the display case! Raw ingredients went through a rigorous process of preparation, mixing, melting, and baking before they got there.
+
+Just as eating raw flour isn’t that appetizing, neither is deriving insights from raw data since it rarely has a nice structure that makes it ready for analytics. There’s some considerable work that’s needed to organize data and make it usable for business users.
+
+This is where dimensional modeling comes into play; it’s a method that can help data folks create meaningful entities (cupcakes and cookies) to live inside their [data mart](https://docs.getdbt.com/guides/best-practices/how-we-structure/4-marts) (your glass display) and eventually use for business intelligence purposes (eating said cookies).
+
+So I guess we take it back—you’re not just trying to build a bakery, you’re also trying to build a top-notch foundation for meaningful analytics. Dimensional modeling can be a method to get you part of the way there.
+
+## Facts vs. dimensions
+
+The ultimate goal of dimensional modeling is to be able to categorize your data into fact or dimension models, making them the key components to understand. So what are these components?
+
+### Facts
+
+A fact is a collection of information that typically refers to an action, event, or result of a business process. As such, people typically liken facts to verbs. In terms of a real business, some facts may look like account creations, payments, or emails sent.
+
+It’s important to note that fact tables act as a historical record of those actions. You should almost never overwrite that data when it needs updating. Instead, you add new data as additional rows onto that table.
+
+For many businesses, marketing and finance teams need to understand all the touchpoints leading up to a sale or conversion. A fact table for a scenario like this might look like a `fct_account_touchpoints` table:
+
+| **unique_id** | **touchpoint_id** | **account_id** | **touchpoint_name** | **touchpoint_created_at_utc** |
+|---|---|---|---|---|
+| 23534 | 34 | 325611 | fall_convention_2020 | 2022-01-30 00:11:26 |
+| 12312 | 29 | 325611 | demo_1 | 2022-05-29 01:42:07 |
+| 66782 | 67 | 325611 | demo_2 | 2022-06-25 04:10:32 |
+| 85311 | 15 | 105697 | fall_convention_2020 | 2022-05-29 06:13:45 |
+
+Accounts may have many touchpoints and this table acts as a true log of events leading up to an account conversion.
+
+This table is great and all for helping you understand what might have led to a conversion or account creation, but what if business users need additional context on these accounts or touchpoints? That’s where dimensions come into play.
+
+### Dimensions
+A dimension is a collection of data that describes who or what took action or was affected by the action. Dimensions are typically likened to nouns. They add context to the stored events in fact tables.
In terms of a business, some dimensions may look like users, accounts, customers, and invoices.
+
+A noun can take multiple actions or be affected by multiple actions. It’s important to call out: a noun doesn’t become a new thing whenever it does something. As such, when updating dimension tables, you should overwrite that data instead of duplicating them, like you would in a fact table.
+
+Following the example from above, a dimension table for this business would look like a `dim_accounts` table with some descriptors:
+
+| account_id | account_created_at_utc | account_name | account_status | billing_address |
+|---|---|---|---|---|
+| 325611 | 2022-06-29 12:11:43 | Not a Pyramid Scheme | active | 9999 Snake Oil Rd, Los Angeles, CA |
+| 234332 | 2019-01-03 07:34:50 | Charlie’s Angels’ Chocolate Factory | inactive | 123 Wonka Way, Indianapolis, IN |
+| 105697 | 2020-12-11 11:50:22 | Baggins Thievery | active | The Shire |
+
+In this table, each account only has one row. If an account’s name or status were to be updated, new values would overwrite existing records versus appending new rows.
+
+:::tip Snapshots
+For dimension tables you want to keep track of changes to, folks can leverage [dbt snapshots](https://docs.getdbt.com/docs/building-a-dbt-project/snapshots).
+:::
+
+### Facts and dimensions at play with each other
+Cool, you think you’ve got some facts and dimensions that can be used to qualify your business. There’s one big consideration left to think about: how do these facts and dimensions interact with each other?
+
+![Image depicting how facts and dimensions join together to create analytics-ready datasets](/img/docs/terms/dimensional-modeling/fact-star.png)
+
+In the pre-cloud data warehouse era, there were two dominant design options, star schemas and snowflake schemas, that were used to concretely separate out the lines between fact and dimension tables.
+
+- In a star schema, there’s one central fact table that can join to relevant dimension tables.
+- A snowflake schema is simply an extension of a star schema; dimension tables link to other dimension tables, forming a snowflake-esque shape.
+
+It sounds really nice to have this clean setup with star or snowflake schemas. Almost as if it’s too good to be true (and it very well could be).
+
+The development of cheap cloud storage, BI tools great at handling joins, the evolution of SQL capabilities, and data analysts with growing skill sets have changed the way data folks look at dimensional modeling and star schemas. Wide tables consisting of fact and dimension tables joined together are now a competitive option for data teams.
+
+Below, we’ll dig more into the design process of dimensional modeling, wide tables, and the beautiful ambiguity of it all.
+
+## The dimensional modeling design process
+
+According to the Kimball Group, the official(™) four-step design process is (1) selecting a business process to analyze, (2) declaring the <Term id="grain" />, (3) identifying the dimensions, and (4) identifying the facts. That makes dimensional modeling sound really easy, but in reality, it’s packed full of nuance.
+
+Coming back down to planet Earth, your design process is how you make decisions about:
+
+- Whether something should be a fact or a dimension
+- Whether you should keep fact and dimension tables separate or create wide, joined tables
+
+This is something that data philosophers and thinkers could debate long after we’re all gone, but let’s explore some of the major questions to hold you over in the meantime.
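+
+Before digging into those questions, here's a minimal sketch of what a "wide, joined table" means in practice, reusing the hypothetical `fct_account_touchpoints` and `dim_accounts` tables from above: a single star-schema join that enriches each touchpoint event with account context.
+
+```sql
+select
+    touchpoints.touchpoint_name,
+    touchpoints.touchpoint_created_at_utc,
+    accounts.account_name,
+    accounts.account_status
+-- the fact table holds the events...
+from fct_account_touchpoints as touchpoints
+-- ...and the dimension table describes the accounts behind them
+left join dim_accounts as accounts
+    on touchpoints.account_id = accounts.account_id
+```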
+
+### Should this entity be a fact or dimension?
+
+Time to put on your consultant hat because that dreaded answer is coming: it depends. This is what makes dimensional modeling a challenge!
+
+Kimball would say that a fact must be numeric. The inconvenient truth is: an entity can be viewed as a fact or a dimension depending on the analysis you are trying to run.
+
+:::note Birds of a feather
+If you ran a clinic, you would probably have a log of appointments by patient. At first, you could think of appointments as facts—they are, after all, events that happen and patients can have multiple appointments—and patients as dimensions. But what if your business team really cared about the appointment data itself—how well it went, when it happened, the duration of the visit. You could, in this scenario, make the case for treating this appointments table as a dimension table. If you cared more about looking at your data at a patient-level, it probably makes sense to keep appointments as facts and patients as dimensions. All this to say: there’s inherent complexity in dimensional modeling, and it’s up to you to draw those lines and build those models.
+:::
+
+So then, how do you know which is which if there aren’t any hard rules!? Life is a gray area, my friend. Get used to it.
+
+A general rule of thumb: go with your gut! If something feels like it should be a fact to meet your stakeholders' needs, then it’s a fact. If it feels like a dimension, it’s a dimension. The world is your oyster. If you find that you made the wrong decision down the road, it’s (usually) no big deal. You can remodel that data. Just remember: you’re not a surgeon. No one will die if you mess up (hopefully). So, just go with what feels right because you’re the expert on your data 👉😎👉
+
+Also, this is why we have data teams. Dimensional modeling, and data modeling in general, is usually a collaborative effort; working with folks on your team to understand the data and stakeholder wants will ultimately lead to some rad data marts.
+
+### Should I make a wide table or keep them separate?
+
+Yet again, it depends. Don’t roll your eyes. Strap in for a quick history lesson because the answer to this harkens back to the very inception of dimensional modeling.
+
+Back in the day before cloud technology adoption was accessible and prolific, storing data was expensive and joining data was relatively cheap. Dimensional modeling came about as a solution to these issues. Separating collections of data into smaller, individual tables (star schema-esque) made the data cheaper to store and easier to understand. So, individual tables were the thing to do back then.
+
+Things are different today. Cloud storage has gotten really inexpensive. Instead, computing is the primary cost driver. Now, keeping all of your tables separate can be expensive because every time you join those tables, you’re spending usage credits.
+
+Should you just add everything to one, wide table? No. One table will never rule them all. Knowing whether something should be its own fact table or get added on to an existing table generally comes down to understanding who will be your primary end consumers.
+
+For end business users who are writing their own SQL, feel comfortable performing joins, or use a tool that joins tables for them, keeping your data as separate fact and dimension tables is a solid approach. In this setup, these users have the freedom and flexibility to join and explore as they please.
+
+If your end data consumers are less comfortable with SQL and your BI tool doesn’t handle joins well, you should consider joining several fact and dimension tables into wide tables. Another consideration: these wide, heavily joined tables can tend to wind up pretty specialized and specific to business departments. Would these types of wide tables be helpful for you, your data team, and your business users? Well, that’s for you to unpack.
+
+## Advantages and disadvantages of dimensional modeling
+
+The benefits and drawbacks of dimensional modeling are pretty straightforward. Generally, the main advantages can be boiled down to:
+
+* **More accessibility**: Since the output of good dimensional modeling is a [data mart](https://docs.getdbt.com/guides/best-practices/how-we-structure/4-marts), the tables created are easier to understand and more accessible to end consumers.
+* **More flexibility**: Easy to slice, dice, filter, and view your data in whatever way suits your purpose.
+* **Performance**: Fact and dimension models are typically materialized as tables or [incremental models](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/configuring-incremental-models). Since these models often form the core understanding of a business, they're queried frequently. Materializing them as tables allows them to be more performant in downstream BI platforms.
+
+The disadvantages include:
+* **Navigating ambiguity**: You need to rely on your understanding of your data and stakeholder wants to model your data in a comprehensible and useful way. What you know about your data and what people really need out of the data are two of the most fundamental and difficult things to understand and balance as a data person.
+* **Utility limited by your BI tool**: Some BI tools don’t handle joins well, which can make queries from separated fact and dimension tables painful. Other tools have long query times, which can make querying from ultra-wide tables not fun.
+
+## Conclusion
+
+Dimensional data modeling is a data modeling technique that allows you to organize your data into distinct entities that can be mixed and matched in many ways. That can give your stakeholders a lot of flexibility. [While the exact methodologies have changed](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/) (and will continue to), the philosophical principle of having tables that are sources of truth and tables that describe them will remain important in the work of analytics engineering practitioners.
+
+
+## Additional Reading
+
+Dimensional modeling is a tough, complex, and opinionated topic in the data world.
Below you’ll find some additional resources that may help you identify the data modeling approach that works best for you, your data team, and your end business users:
+
+
+
+* [Modular data modeling techniques](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/)
+* [Stakeholder-friendly model naming conventions](https://docs.getdbt.com/blog/stakeholder-friendly-model-names/)
+* [How we structure our dbt projects guide](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview)
diff --git a/website/docs/terms/dml.md b/website/docs/terms/dml.md
index ed3d6326edb..fb8a0f6d610 100644
--- a/website/docs/terms/dml.md
+++ b/website/docs/terms/dml.md
@@ -1,11 +1,15 @@
 ---
 id: dml
 title: DML
+description: Data Manipulation Language (DML) is a class of SQL statements that are used to query, edit, add and delete row-level data from database tables or views.
 displayText: DML
 hoverSnippet: Data Manipulation Language (DML) is a class of SQL statements that are used to query, edit, add and delete row-level data from database tables or views. The main DML statements are SELECT, INSERT, DELETE, and UPDATE.
-
 ---
+
+ DML: The SQL statements that make the data world go 'round 
+
+
 Data Manipulation Language (DML) is a class of SQL statements that are used to query, edit, add and delete row-level data from database tables or views. The main DML statements are `SELECT`, `INSERT`, `DELETE`, and `UPDATE`.

 DML is contrasted with Data Definition Language (DDL) which is a series of SQL statements that you can use to edit and manipulate the *structure* of databases and the objects in them.
@@ -21,7 +25,7 @@ The primary DML statements are `SELECT`, `INSERT`, `DELETE`, and `UPDATE`. With
 - To *access* the data in a database object, use `SELECT` statements

 :::important Important
-For the most part, the syntax for DML statements are pretty universal across [most databases that dbt supports](https://docs.getdbt.com/docs/available-adapters) including Google Bigquery, Databricks, Postgres, Amazon Redshift, and Snowflake. Regardless, please use the “Further Reading” section to see the specifics on how the following DML statements would be implemented in your database of interest!
+For the most part, the syntax for DML statements is pretty universal across [Supported Data Platforms](https://docs.getdbt.com/docs/supported-data-platforms) including Google BigQuery, Databricks, Postgres, Amazon Redshift, and Snowflake. Regardless, please use the “Further Reading” section to see the specifics on how the following DML statements would be implemented in your database of interest!
 :::

 ### SELECT
@@ -92,8 +96,8 @@ DML statements allow you to query, edit, add, and remove data stored in database

 For more resources on why people who use dbt don’t write DML, check out the following:

-- [Why not write DML](https://docs.getdbt.com/faqs/why-not-write-dml)
-- [SQL dialect](https://docs.getdbt.com/faqs/sql-dialect)
+- [Why not write DML](/faqs/project/why-not-write-dml)
+- [SQL dialect](/faqs/models/sql-dialect)

 For database-specific DML documents, please check out the resources below:

diff --git a/website/docs/terms/dry.md b/website/docs/terms/dry.md
index 366ae7cbc97..0a0d6f14393 100644
--- a/website/docs/terms/dry.md
+++ b/website/docs/terms/dry.md
@@ -1,15 +1,95 @@
 ---
 id: dry
 title: DRY
+description: DRY is a software development principle that stands for “Don’t Repeat Yourself.” Living by this principle means that your aim is to reduce repetitive patterns and code.
displayText: DRY
-hoverSnippet: Don’t Repeat Yourself - a coding ideal to work towards that can be applied at several levels (e.g. a variable, definition of business / transformation logic, joining two models, etc.)
+hoverSnippet: DRY is a software development principle that stands for “Don’t Repeat Yourself.” Living by this principle means that your aim is to reduce repetitive patterns and duplicate code and logic in favor of modular and referenceable code.
 ---

-:::important This page could use some love
-This term would benefit from additional depth and examples. Have knowledge to contribute? [Create a discussion in the docs.getdbt.com GitHub repository](https://github.com/dbt-labs/docs.getdbt.com/discussions) to begin the process of becoming a glossary contributor!
+
+ What is DRY? Hint: It makes for great code - dbt Labs 
+
+
+DRY is a software development principle that stands for “Don’t Repeat Yourself.” Living by this principle means that your aim is to reduce repetitive patterns and duplicate code and logic in favor of modular and referenceable code.
+
+The DRY code principle was originally made with software engineering in mind and coined by Andy Hunt and Dave Thomas in their book, _The Pragmatic Programmer_. They believed that “every piece of knowledge must have a single, unambiguous, authoritative representation within a system.” As the field of analytics engineering and data transformation develops, there’s a growing need to adopt [software engineering best practices](https://www.getdbt.com/product/what-is-dbt/), including writing DRY code.
+
+## Why write DRY code?
+
+DRY code is one of the practices that make a good developer a great developer. Solving a problem by any means is great to a point, but eventually, you need to be able to write code that's maintainable by people other than yourself and scalable as system load increases. That's the essence of DRY code.
+
+But what's so great about being DRY as a bone anyway, when you can be WET?
+
+### Don’t be WET
+
+WET, which stands for “Write Everything Twice,” is the opposite of DRY. It's a tongue-in-cheek reference to code that doesn’t exactly meet the DRY standard. In a practical sense, WET code typically involves the repeated _writing_ of the same code throughout a project, whereas DRY code would represent the repeated _reference_ of that code.
+
+Well, how would you know if your code isn't DRY enough? That’s kind of subjective and will vary by the norms set within your organization. That said, a good rule of thumb is [the Rule of Three](https://en.wikipedia.org/wiki/Rule_of_three_(writing)#:~:text=The%20rule%20of%20three%20is,or%20effective%20than%20other%20numbers.). This rule states that the _third_ time you encounter a certain pattern, you should probably abstract it into some reusable unit.
+
+There is, of course, a tradeoff between simplicity and conciseness in code. The more abstractions you create, the harder it can be for others to understand and maintain your code without proper documentation. So, the moral of the story is: DRY code is great as long as you [write great documentation](https://docs.getdbt.com/docs/building-a-dbt-project/documentation).
+
+### Save time & energy
+
+DRY code means you get to write duplicate code less often. You save lots of time by not writing the same thing over and over. Not only that, but you're saving your cognitive energy for bigger problems you'll end up needing to solve, instead of wasting that time and energy on tedious syntax.
+
+Sure, you might have to frontload some of your cognitive energy to create a good abstraction. But in the long run, it'll save you a lot of headaches. Especially if you're building something complex and one typo can be your undoing.
+
+### Create more consistent definitions
+
+Let's go back to what Andy and Dave said in _The Pragmatic Programmer_: “Every piece of knowledge must have a single, unambiguous, authoritative representation within a system.” As a data person, the words “single” and “unambiguous” might have stood out to you.
+
+Most teams have essential business logic that defines the successes and failures of a business. For a subscription-based DTC company, this could be [monthly recurring revenue (MRR)](https://www.getdbt.com/blog/modeling-subscription-revenue/), and for a SaaS product, this could look like customer lifetime value (CLV). Standardizing the SQL that generates those metrics is essential to creating consistent definitions and values.
+
+By writing DRY definitions for key business logic and metrics that are referenced throughout a dbt project and/or BI (business intelligence) tool, data teams can create those single, unambiguous, and authoritative representations for their essential transformations. Gone are the days of 15 different definitions and values for churn, and in are the days of standardization and DRYness.
+
+:::note Experimental dbt Metrics!
+dbt v1.0 currently supports the use of experimental metrics, time series aggregations over a table that support zero or one dimensions. Using [dbt Metrics](https://docs.getdbt.com/docs/building-a-dbt-project/metrics), data teams can define metric calculations, ownerships, and definitions in a YAML file that lives within their dbt project. dbt Metrics are in their experimental stage; if you’re interested in learning more about dbt Metrics, please make sure to join the #dbt-metrics-and-server channel in the [dbt Community Slack](https://www.getdbt.com/community/join-the-community/).
 :::

-Don’t Repeat Yourself - a coding ideal to work towards that can be applied at several levels: a variable, definition of business or transformation logic, joining two models, etc.
+## Tools to help you write DRY code
+
+Let’s just say it: Writing DRY code is easier said than done. For classical software engineers, there’s a ton of resources out there to help them write DRY code. In the world of data transformation, there are also some tools and methodologies that can help folks in [the field of analytics engineering](https://www.getdbt.com/what-is-analytics-engineering/) write more DRY and [modular code](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/).
+
+
+### Common Table Expressions (CTEs)
+
+CTEs are a great way to help you write more DRY code in your data analysis and dbt models. In a formal sense, a CTE is a temporary result set that can be used in a query. In a much more human and practical sense, we like to think of CTEs as separate, smaller queries within the larger query you’re building up. Essentially, you can use CTEs to break up complex queries into simpler blocks of code that are easier to debug and can connect and build off of each other.
+
+If you’re referencing a specific query, perhaps for aggregations that join back to an unaggregated view, CTEs can simply be referenced throughout a query by their CTE_EXPRESSION_NAME, as in the sketch below.
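+
+Here's a minimal sketch of that pattern (the `orders` model and its columns are illustrative): the aggregation is defined once in a CTE, then joined back to the unaggregated rows by name instead of being rewritten.
+
+```sql
+with order_totals as (
+
+    -- define the aggregation once, under one name
+    select
+        customer_id,
+        sum(amount) as total_amount
+    from {{ ref('orders') }}
+    group by customer_id
+
+)
+
+-- join the aggregate back to the unaggregated rows by referencing the CTE
+select
+    orders.order_id,
+    orders.amount,
+    order_totals.total_amount
+from {{ ref('orders') }} as orders
+left join order_totals
+    on orders.customer_id = order_totals.customer_id
+```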
+
+
+### View materializations
+
+View [materializations](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/materializations) are also extremely useful for abstracting code that might otherwise be repeated often. A <Term id="view" /> is a defined passthrough SQL query that can be run against a database. Unlike a table, it doesn’t store data, but it defines the logic that you need to use to fetch the underlying data.
+
+If you’re referencing the same query, CTE, or block of code throughout multiple data models, that’s probably a good sign that code should be its own view.
+
+For example, you might define a SQL view to count new users created in a day:
+
+```sql
+select
+    created_date,
+    count(distinct user_id) as new_users
+from {{ ref('users') }}
+group by created_date
+```
+
+While this is a simple query, writing this logic every time you need it would be super tedious. And what if the `user_id` field changed to a new name? If you’d written this in a WET way, you’d have to find every instance of this code and make the change to the new field versus just updating it once in the code for the view.
+
+To make any subsequent references to this view DRY-er, you simply reference the view in your data model or query.
+
+### dbt macros and packages
+
+dbt also supports the use of [macros](https://docs.getdbt.com/docs/building-a-dbt-project/jinja-macros) and [packages](https://docs.getdbt.com/docs/building-a-dbt-project/package-management) to help data folks write DRY code in their dbt projects. Macros are Jinja-supported functions that can be reused and applied throughout a dbt project. Packages are libraries of dbt code, typically models, macros, and/or tests, that can be referenced and used in a dbt project. They are a great way to use transformations for common data sources (like [ad platforms](https://hub.getdbt.com/dbt-labs/facebook_ads/latest/)) or use more [custom tests for your data models](https://hub.getdbt.com/calogica/dbt_expectations/0.1.2/) _without having to write out the code yourself_. At the end of the day, is there really anything more DRY than that?
+
+## Conclusion
+
+DRY code is a principle that you should always be striving for. It saves you time and energy. It makes your code more maintainable and extensible. And potentially most importantly, it’s the fine line that can help transform you from a good analytics engineer to a great one.

-Benefits include ease of maintenance and readability.
+## Further reading

+* [Data modeling technique for more modularity](https://www.getdbt.com/analytics-engineering/modular-data-modeling-technique/)
+* [Why we use so many CTEs](https://docs.getdbt.com/docs/guides/best-practices)
+* [Glossary: CTE](https://docs.getdbt.com/terms/cte)
+* [Glossary: Materialization](https://docs.getdbt.com/terms/materialization)
+* [Glossary: View](https://docs.getdbt.com/terms/view)
diff --git a/website/docs/terms/edw.md b/website/docs/terms/edw.md
new file mode 100644
index 00000000000..1ac0f37ee47
--- /dev/null
+++ b/website/docs/terms/edw.md
@@ -0,0 +1,63 @@
+---
+id: edw
+title: EDW
+description: The primary difference between an EDW and a regular data warehouse is, well, semantics and perspective. An EDW, like any other data warehouse, is a collection of databases that centralize a business's data
+displayText: EDW
+hoverSnippet: An Enterprise Data Warehouse (EDW), like any other data warehouse, is a collection of databases that centralize a business's information from multiple sources and applications.
+---
+
+
+ What does an EDW (Enterprise Data Warehouse) really mean? 
+
+
+An Enterprise Data Warehouse (EDW), like any other <Term id="data-warehouse" />, is a collection of databases that centralize a business's information from multiple sources and applications. The primary difference between an EDW and a regular data warehouse is, well, semantics and perspective.
+
+The data stored in an EDW comes from all different functions of a company—marketing, finance, engineering, product, and more. The primary goal of an EDW is to provide a central and organized home for both the raw and transformed version of this data. EDWs in modern data practices are typically set up in the cloud, meaning that the servers used to run the warehouse are owned and managed by a cloud provider such as Snowflake, Amazon Redshift, or Google BigQuery.
+
+
+## Data warehouse vs enterprise data warehouse
+
+![](/img/docs/terms/edw/meme.png)
+
+In an EDW, all departments of an organization store their raw and transformed data in databases within a main warehouse. For organizations that are not calling their data warehouse an EDW and have a more siloed setup, there’s a chance each department has *their own separate* data warehouse for storage and computation. **But practically, the difference between a data warehouse and an enterprise data warehouse is semantics.**
+
+Organization size, distribution, data complexity, and business needs can all determine whether a company wants a centralized data warehouse or distributed warehouses per function. Nonetheless, if your organization only has one data warehouse that centrally houses all of your data sources, the distinction isn't really necessary, but *technically* that could be called an EDW.
+
+In the world of analytics engineering, most teams have one central data warehouse that houses data from all of their different departments and functions.
+
+
+### Why is this distinction necessary?
+
+One of the main distinctions is in an organization’s users and distribution. If an organization has multiple databases, a central data warehouse is used to create clear boundaries between raw and source data, staging work, and ready-for-use analytics datasets. In this EDW and classic data warehouse setup, data is accessible across an organization, data teams can create tables that join data from multiple sources, and users can gain enriched perspectives into their data.
+
+If a company has very siloed departments that manage their own data and budgets and have little need for crossover with other departments or data sources, emphasizing the difference between a central EDW and their own data warehouses could be a necessity for budgeting and governance reasons.
+
+Lastly, the rapid adoption of cloud data warehouses in the last decade has shifted the terminology from what many people called an EDW to simply a data warehouse.
+
+
+## Enterprise data warehouse use cases
+
+There are a variety of reasons why an organization might opt to have an EDW or data warehouse. A centralized and organized data warehouse provides advantages for the following use cases:
+
+- Create clear partitions between raw, staging, and heavily transformed data
+- Standardize data definitions and metrics across multiple data sources
+- Connect a BI tool to one central data warehouse and surface that data to users across a business
+
+### Benefits of an EDW
+
+Like most other data warehouses, the benefit of an EDW is the ability to store raw and transformed data from multiple sources in one single data warehouse.
Users across different departments and data team members embedded in different functions can all have access to the same data. Cloud data warehouses also scale with data and users, making EDWs an appropriate place for organizations to grow their analytics work.
+
+EDWs also help in building a 360-degree view of the company by combining different sources of information, such as customer feedback, financial records, product inventory, and marketing insights. All of this information can then be organized in data marts, schemas, and tables within one EDW that are eventually exposed to a BI tool.
+
+In addition, because all of an organization’s data is stored in one place, data teams can provide access to only those who need access to specific schemas and tables. Keeping these access patterns and changes in only one data warehouse limits the amount of data you need to go through for auditing and other security requirements.
+
+## Conclusion
+
+An enterprise data warehouse is, in general, like any other data warehouse; it acts as a central home for multiple departments’ raw and transformed data. An EDW is often composed of multiple databases to store raw, staging, development, and production-ready data. The primary benefits of an EDW are centralization, standardization, and accessibility. You probably have a data warehouse set up like an EDW; you’re likely just not calling it that 😉
+
+
+## Additional reading
+EDW, data warehouse, or something different altogether? Check out some of our favorite resources on the fundamentals of data storage and organization:
+
+- [Glossary: Dimensional modeling](https://docs.getdbt.com/terms/dimensional-modeling)
+- [Glossary: Data warehouse](https://docs.getdbt.com/terms/data-warehouse)
\ No newline at end of file
diff --git a/website/docs/terms/elt.md b/website/docs/terms/elt.md
index 764ed544dec..b36c0486d9e 100644
--- a/website/docs/terms/elt.md
+++ b/website/docs/terms/elt.md
@@ -1,11 +1,14 @@
 ---
 id: elt
 title: ELT
+description: ELT is the process of first extracting data from different sources, then loading it into a data warehouse, and finally transforming it.
 displayText: ELT
 hoverSnippet: Extract, Load, Transform (ELT) is the process of first extracting data from different data sources, loading it into a target data warehouse, and finally transforming it.
 ---
-
-Extract, Load, Transform (ELT) is the process of first extracting data from different data sources, then loading it into a target data warehouse, and finally transforming it.
+
+ ELT: What it is and why it's often better than ETL 
+
+Extract, Load, Transform (ELT) is the process of first extracting data from different data sources, then loading it into a target <Term id="data-warehouse" />, and finally transforming it.

 ELT has emerged as a paradigm for how to manage information flows in a modern data warehouse. This represents a fundamental shift from how data previously was handled when Extract, Transform, Load (ETL) was the data workflow most companies implemented.
@@ -44,7 +47,7 @@ In the final transformation step, the raw data that has been loaded into your da
 - Some columns are potentially the incorrect data type
 - Tables are not joined to other tables
 - Timestamps may be in the incorrect timezone for your reporting
-- JSON fields may need to be unnested
+- <Term id="json" /> fields may need to be unnested
 - Tables may be missing primary keys
 - And more!
@@ -119,7 +122,7 @@ As mentioned earlier, the recent development of certain technologies and product
 |---|---|---|---|
 | Fivetran/HVR | E, some T, L | Fivetran is a SaaS company that helps data teams extract, load, and perform some transformation on their data. Fivetran easily integrates with modern data warehouses and dbt. They also offer transformations that leverage dbt Core. | :x: |
 | Stitch by Talend | E, L | Stitch (part of Talend) is another SaaS product that has many data connectors to extract data and load it into data warehouses. | :x: |
-| Airbyte | E, L | Airbyte is an open-source and cloud service that allows teams to create data extraction and load pipelines. | :white_check_mark: |
+| Airbyte | E, L | Airbyte is an open-source and cloud service that allows teams to create <Term id="data-extraction" /> and load pipelines. | :white_check_mark: |
 | Funnel | E, some T, L | Funnel is another product that can extract and load data. Funnel’s data connectors are primarily focused around marketing data sources. | :x: |
 | dbt | T | dbt is the transformation tool that enables data analysts and engineers to transform, test, and document data in the cloud data warehouse. dbt offers both an open-source and cloud-based product. | :white_check_mark: |

diff --git a/website/docs/terms/etl.md b/website/docs/terms/etl.md
index 9dd07ce31ab..acc0e7b1c40 100644
--- a/website/docs/terms/etl.md
+++ b/website/docs/terms/etl.md
@@ -1,11 +1,16 @@
 ---
 id: etl
 title: ETL
+description: ETL is the process of first extracting data from a data source, transforming it, and then loading it into a target data warehouse.
 displayText: ETL
 hoverSnippet: Extract, Transform, Load (ETL) is the process of first extracting data from a data source, transforming it, and then loading it into a target data warehouse.
 ---

-ETL, or “Extract, Transform, Load”, is the process of first extracting data from a data source, transforming it, and then loading it into a target data warehouse. In ETL workflows, much of the meaningful data transformation occurs outside this primary pipeline in a downstream business intelligence (BI) platform.
+
+ ETL: What is it and is it still relevant? - dbt Labs 
+
+
+ETL, or “Extract, Transform, Load”, is the process of first extracting data from a data source, transforming it, and then loading it into a target <Term id="data-warehouse" />. In ETL workflows, much of the meaningful data transformation occurs outside this primary pipeline in a downstream business intelligence (BI) platform.

 ETL is contrasted with the newer <Term id="elt" /> (Extract, Load, Transform) workflow, where transformation occurs after data has been loaded into the target data warehouse. In many ways, the ETL workflow could have been renamed the ETLT workflow, because a considerable portion of meaningful data transformations happen outside the data pipeline. The same transformations can occur in both ETL and ELT workflows, the primary difference is *when* (inside or outside the primary ETL workflow) and *where* the data is transformed (ETL platform/BI tool/data warehouse).
@@ -86,9 +91,9 @@ You may read other articles or technical documents that use ETL and ELT intercha

 In both processes, data from different data sources is extracted in similar ways. However, in ELT, data is then directly loaded into the target data platform, versus being transformed first as in ETL. Now, via ELT workflows, both raw and transformed data can live in a data warehouse. In ELT workflows, data folks have the flexibility to model the data after they’ve had the opportunity to explore and analyze the raw data.
ETL workflows can be more constraining since transformations happen immediately after extraction. We break down some of the other major differences between the two below:

-| | ETL | ELT |
+| | ELT | ETL |
 |---|---|---|
-| Programming skills required | Often requires custom scripts or considerable data engineering lift to extract and transform data prior to load. | Often requires little to no code to extract and load data into your data warehouse. |
+| Programming skills required | Often requires little to no code to extract and load data into your data warehouse. | Often requires custom scripts or considerable data engineering lift to extract and transform data prior to load. |
 | Separation of concerns | Extraction, load, and transformation layers can be explicitly separated out by different products. | ETL processes are often encapsulated in one product. |
 | Distribution of transformations | Since transformations take place last, there is greater flexibility in the modeling process. Worry first about getting your data in one place, then you have time to explore the data to understand the best way to transform it. | Because transformation occurs before data is loaded into the target location, teams must conduct thorough work beforehand to make sure data is transformed properly. Heavy transformations often take place downstream in the BI layer. |
 | [Data team roles](https://www.getdbt.com/data-teams/analytics-job-descriptions/) | ELT workflows empower data team members who know SQL to create their own extraction and loading pipelines and transformations. | ETL workflows often require teams with greater technical skill to create and maintain pipelines. |

diff --git a/website/docs/terms/grain.md b/website/docs/terms/grain.md
index bdd0ccce008..608a5c6391d 100644
--- a/website/docs/terms/grain.md
+++ b/website/docs/terms/grain.md
@@ -1,10 +1,15 @@
 ---
 id: grain
 title: Data grain
+description: Grain is the combination of columns at which records in a table are unique. Ideally, this is captured in a single column or a unique primary key.
 displayText: grain
 hoverSnippet: Your data's grain is the combination of columns at which records in a table are unique. Ideally, this is captured in a single column and a unique primary key.
 ---
+
+ Data grain: What granularity means in terms of data modeling 
+
+
 Grain is the combination of columns at which records in a table are unique. Ideally, this is captured in a single column, a unique <Term id="primary-key" />, but even then, there is descriptive grain behind that unique id. Let’s look at some examples to better understand this concept.

 | user_id | address |
diff --git a/website/docs/terms/idempotent.md b/website/docs/terms/idempotent.md
index 764d02cfe45..245728953c4 100644
--- a/website/docs/terms/idempotent.md
+++ b/website/docs/terms/idempotent.md
@@ -1,19 +1,23 @@
 ---
 id: idempotent
 title: Idempotent
+description: Idempotent is an adjective to describe a process that gives you the same result no matter how many times you run it.
 displayText: idempotent
-hoverSnippet: Idempotent describes a process that is independent of previous executions of that same process.
+hoverSnippet: Idempotent describes a process that gives you the same result no matter how many times you run it.
 ---

-:::important This page could use some love
-This term would benefit from additional depth and examples. Have knowledge to contribute?
[Create a discussion in the docs.getdbt.com GitHub repository](https://github.com/dbt-labs/docs.getdbt.com/discussions) to begin the process of becoming a glossary contributor! -::: -Idempotent is a way to describe a particular process. A process that is idempotent is independent of previous executions of that same process. + +<head> + <title>What is idempotency and why is the concept important in data?</title> +</head> + +Idempotent is an adjective to describe a process that gives you the same result no matter how many times you run it. For a mathematical example: adding 1 to a number is not idempotent, but multiplying by 1 is. Add 1 twice and you get a different result each time; multiply by 1 twice and the result never changes. -Idempotent is an intimidating word, but particularly important in a data transformation process. dbt was built to be idempotent to avoid those challenges of having to roll back changes and being able to independently refresh your data pipeline. +A more real-world example of idempotency is the process of saving a file in a word processor. Given the same inputs (i.e., the same document contents), clicking "_Save_" one time will leave your system in the exact same state as clicking "_Save_" five times in a row. + +A non-idempotent version of the "_Save_" button might do something like "Append the paragraph I just wrote to the end of the file". Doing _that_ five times in a row will _not_ leave you in the same state as doing it one time; your most recent paragraph would have duplicates. + +If word processors only gave us non-idempotent "Append paragraph" / "Update paragraph" / "Delete paragraph" operations, then saving our document changes would be a lot more difficult! We'd have to keep track of which paragraphs we previously saved, and either make sure to not save them again or have a process in place to regularly clean up duplicate paragraphs. The implementation of the "_Save_" button in word processors takes the collection of low-level non-idempotent filesystem operations (read/append/overwrite/delete), and systematically runs them in a certain order so that the _user_ doesn't have to deal with the non-idempotency. The user can just focus on writing -- choosing words, editing for clarity, ensuring paragraphs aren't too long, etc. -- and the word processor deals with making sure the words get persisted properly to disk. -:::note Note -This is the concept in analytics engineering that is the hardest to spell and the most important to learn. -::: +This word processing analogy is very similar to what dbt does for data transformation: it takes the collection of low-level non-idempotent database operations (`SELECT`/`INSERT`/`UPDATE`/`DELETE` -- collectively known as DML statements), and systematically runs them in a certain order so that analytics engineers don't have to deal with non-idempotency. We can just focus on the data -- [choosing good model and column names](https://docs.getdbt.com/blog/on-the-importance-of-naming), [documenting them](https://docs.getdbt.com/docs/about/viewpoint#documentation), [ensuring data consumers can understand them](https://docs.getdbt.com/docs/guides/best-practices#consider-the-information-architecture-of-your-data-warehouse), etc. -- and [`dbt run`](https://docs.getdbt.com/reference/commands/run) will make sure the database ends up in the right state.
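+To ground this in SQL, here is a minimal sketch contrasting the two behaviors (table names are hypothetical, and `create or replace table` assumes a warehouse that supports it, such as Snowflake or BigQuery). This is the same idea a dbt table materialization applies on every `dbt run`:
+
+```sql
+-- Non-idempotent: every run appends the same rows again,
+-- so running it twice leaves duplicates behind.
+insert into analytics.daily_orders
+select order_date, count(*) as order_count
+from raw.orders
+group by order_date;
+
+-- Idempotent: every run rebuilds the table from scratch,
+-- so the database ends up in the same state no matter
+-- how many times it runs.
+create or replace table analytics.daily_orders as
+select order_date, count(*) as order_count
+from raw.orders
+group by order_date;
+```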
diff --git a/website/docs/terms/json.md b/website/docs/terms/json.md new file mode 100644 index 00000000000..652fb58cbe3 --- /dev/null +++ b/website/docs/terms/json.md @@ -0,0 +1,103 @@ +--- +id: json +title: JSON +description: JSON (JavaScript Object Notation) is a minimal format for semi-structured data used to capture relationships between fields and values. +displayText: JSON +hoverSnippet: JSON (JavaScript Object Notation) is a minimal format for semi-structured data used to capture relationships between fields and values. +--- + +JSON stands for JavaScript Object Notation. JSON is a minimal format which is great for processing data for applications. It can capture many types of relationships in a concise format and is a commonly used format for semi-structured data. The tables in your <Term id="data-warehouse" /> contain structured data (as opposed to semi-structured), where for each row, each field typically contains one value. Structured data, or tabular data, is intuitive and easy to read, but semi-structured data offers more flexibility. + +Let’s talk through what that looks like in practice so you can get a better sense of what we mean. + +## JSON syntax example + +When looking at data formatted in JSON, we say that the data is stored in **JSON objects**. These are composed of key-value pairs. JSON objects are enclosed in curly brackets (`{ }`) and each key-value pair is separated by a comma. Here’s an example: + +```json +order = {"customer_id":2947, "order_id":4923, "order_items":"cheesecake"} +``` + +`order` is the JSON object. `"customer_id":2947` is one of the key-value pairs within this JSON object. + +If I wanted to find the `customer_id`, I could return that value with `order["customer_id"]` or `order.customer_id`. It’s easy for us to simply read the `customer_id` just by looking at the JSON object in this example, but what if your JSON object contains hundreds of key-value pairs or complex nesting? Being aware of how to pull information out of JSON is essential if you’re working with it in the wild. + +A key feature of JSON is that it can contain data types that aren’t normally found in relational databases, namely **dictionaries** and **arrays**. Let’s break down what that means and then we’ll look at an example to pull everything together. + +### Dictionaries and arrays in JSON + +JSON inherits its syntax from JavaScript (JS), so dictionaries and arrays are formatted in the same way as they are in JS. Dictionaries are formatted just like JSON objects and consist of key-value pairs. Arrays are lists of values; they’re enclosed in square brackets (`[ ]`) and each value is separated by a comma, like so: + +```json +menu_items = ["cheesecake", "danish", "coffee"] +``` + +Individual values from an array can be called by referencing the location of a value within the array. Arrays are zero-indexed, which means that the first item is at position 0 and we count up from there. + +- `menu_items[0]` will return “cheesecake” +- `menu_items[1]` will return “danish” +- `menu_items[2]` will return “coffee” + +Dictionaries and arrays can be nested in JSON objects as well as nested in each other. **Dictionaries and arrays can only be values. They can never be keys.** + +Here’s an example of a JSON object describing a tweet from [Twitter’s developer platform](https://developer.twitter.com/en/docs/twitter-api/v1/data-dictionary/overview).
+ +```json +tweet = +{ + "created_at": "Thu Apr 06 15:24:15 +0000 2017", + "id_str": "850006245121695744", + "text": "1\/ Today we\u2019re sharing our vision for the future of the Twitter API platform!\nhttps:\/\/t.co\/XweGngmxlP", + "user": { + "id": 2244994945, + "name": "Twitter Dev", + "screen_name": "TwitterDev", + "location": "Internet", + "url": "https:\/\/dev.twitter.com\/", + "description": "Your official source for Twitter Platform news, updates & events. Need technical help? Visit https:\/\/twittercommunity.com\/ \u2328\ufe0f #TapIntoTwitter" + }, + "place": { + }, + "entities": { + "hashtags": [ + ], + "urls": [ + { + "url": "https:\/\/t.co\/XweGngmxlP", + "unwound": { + "url": "https:\/\/cards.twitter.com\/cards\/18ce53wgo4h\/3xo1c", + "title": "Building the Future of the Twitter API Platform" + } + } + ], + "user_mentions": [ + ] + } +} +``` + +Here's a quick quiz to see if you're understanding the file's structure: + +
+<details> +<summary>How would you call the user ID?</summary> +`tweet['user']['id']` +</details>
    + +
+<details> +<summary>How would you call the unwound url?</summary> +`tweet['entities']['urls'][0]['unwound']['url']` +</details>
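+If a JSON object like this tweet lands in your data warehouse, you can pull out the same values with SQL. A minimal sketch, assuming a Snowflake-style `variant` column named `raw` in a hypothetical `tweets` table (other warehouses expose similar JSON functions):
+
+```sql
+select
+    raw:user.id::number                      as user_id,
+    raw:entities.urls[0].unwound.url::string as unwound_url
+from tweets;
+```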
+ +As you can see, JSON objects can get complex pretty quickly. + +## Why is JSON important in modern analytics? + +Semi-structured data offers flexibility with the trade-off of being more complex. JSON doesn’t require a pre-defined schema. It allows nesting, values can be different data types, and it lends itself well to changes in the shape of the incoming data. As you can imagine, the above Tweet object would look very different if we tried to restructure it so it could fit into a table. It would be hard to read, or we would lose information, or both. + +## Use cases for JSON + +JSON is lightweight and often used to transfer data over a network connection. As we’ve seen, data from social media sites is often stored as JSON objects. JSON is also commonly how data from IoT sensors is formatted, and you’ll often see JSON when using an API. + +## Conclusion + +The greatest strength of JSON also acts as its weakness—the data it contains informs the shape the object takes, rather than the other way around. Structured data is the bread and butter of analytics work, but a semi-structured format is an alternative option when a tabular format becomes too rigid to describe the relationships between different entities. \ No newline at end of file diff --git a/website/docs/terms/materialization.md b/website/docs/terms/materialization.md index 73f17cadc30..fdeaaebfcc8 100644 --- a/website/docs/terms/materialization.md +++ b/website/docs/terms/materialization.md @@ -1,14 +1,20 @@ --- id: materialization title: Materialization +description: A materialization is the exact Data Definition Language (DDL) that dbt will use when creating the model’s equivalent in a data warehouse. displayText: materialization hoverSnippet: The exact Data Definition Language (DDL) that dbt will use when creating the model’s equivalent in a data warehouse. --- + +<head> + <title>What does materialization mean in the context of dbt?</title> +</head> + :::important This page could use some love This term would benefit from additional depth and examples. Have knowledge to contribute? [Create a discussion in the docs.getdbt.com GitHub repository](https://github.com/dbt-labs/docs.getdbt.com/discussions) to begin the process of becoming a glossary contributor! ::: -The exact Data Definition Language (DDL) that dbt will use when creating the model’s equivalent in a data warehouse. It's the manner in which the data is represented, and each of those options is defined either canonically (tables, views, incremental), or bespoke. +The exact Data Definition Language (DDL) that dbt will use when creating the model’s equivalent in a <Term id="data-warehouse" />. It's the manner in which the data is represented, and each of those options is defined either canonically (tables, views, incremental) or bespoke. It is important to consider the downstream impacts of your materialization choice on query run times and macro capabilities. diff --git a/website/docs/terms/primary-key.md b/website/docs/terms/primary-key.md index 41ee8ccf27f..2baa2ee8171 100644 --- a/website/docs/terms/primary-key.md +++ b/website/docs/terms/primary-key.md @@ -1,10 +1,15 @@ --- id: primary-key title: Primary key +description: A primary key is a non-null column in a database object that uniquely identifies each row. Primary keys take the form of a natural or surrogate key. displayText: primary key hoverSnippet: A primary key is a non-null column in a database object that uniquely identifies each row.
--- + +<head> + <title>Primary key in SQL (AKA Constraints) — dbt Labs</title> +</head> + A primary key is a non-null column in a database object that uniquely identifies each row. Primary keys take the form of a natural or <Term id="surrogate-key" />. It’s important to note that for each <Term id="table" /> or <Term id="view" /> in your database, there must only be one primary key column per database object. At their core, you create and use these row-level unique identifiers to: @@ -16,7 +21,7 @@ At their core, you create and use these row-level unique identifiers to: One of the great things about data modeling is that there are very few rules to it. You have the flexibility to create the models and columns that are applicable to your business and the SQL you use to accomplish that is pretty much up to you and your team. _Having a primary key in each data model is pretty much the one rule you can’t break._ Without primary keys that are tested for non-nullness and uniqueness, duplicate or null records can slip undetected into your data models and cause counts to be incorrect. These two reasons coupled together can create a sense of distrust in the data and data team. -Use this glossary page to understand the importance of primary keys, how natural keys and surrogate keys differ, and how data warehouse support for primary keys varies. +Use this glossary page to understand the importance of primary keys, how natural keys and surrogate keys differ, and how <Term id="data-warehouse" /> support for primary keys varies. ## Types of primary keys @@ -103,7 +108,7 @@ In general for Redshift, it’s still good practice to define your primary keys ### Google BigQuery -BigQuery is pretty unique here in that it doesn’t support or enforce primary keys. If your team is on BigQuery, you’ll need to have some [pretty solid testing](https://docs.getdbt.com/docs/building-a-dbt-project/tests) in place to ensure your primary key fields are unique and non-null. +BigQuery is pretty unique here in that it doesn’t support or enforce primary keys. If your team is on BigQuery, you’ll need to have some [pretty solid testing](/docs/build/tests) in place to ensure your primary key fields are unique and non-null. ### Databricks @@ -136,7 +141,7 @@ If you don't have a field in your table that would act as a natural primary key, If your data warehouse doesn’t provide out-of-the-box support and enforcement for primary keys, it’s important to clearly label and put your own constraints on primary key fields. This could look like: * **Creating a consistent naming convention for your primary keys**: You may see an `id` field or fields prefixed with `pk_` (ex. `pk_order_id`) to identify primary keys. You may also see the primary key be named as the obvious table grain (ex. In the jaffle shop’s `orders` table, the primary key is called `order_id`). -* **Adding automated [tests](https://docs.getdbt.com/docs/building-a-dbt-project/tests) to your data models**: Use a data tool, such as dbt, to create not null and unique tests for your primary key fields. +* **Adding automated [tests](/docs/build/tests) to your data models**: Use a data tool, such as dbt, to create not null and unique tests for your primary key fields. ## Testing primary keys diff --git a/website/docs/terms/relational-database.md b/website/docs/terms/relational-database.md new file mode 100644 index 00000000000..8f05e5f4944 --- /dev/null +++ b/website/docs/terms/relational-database.md @@ -0,0 +1,88 @@ +--- +id: relational-database +title: Relational database +description: A relational database provides a structured way to store data into tables consisting of rows and columns.
Different tables in a relational database can be joined together using common columns from each table, forming relationships. +displayText: relational database +hoverSnippet: A relational database provides a structured way to store data into tables consisting of rows and columns. Different tables in a relational database can be joined together using common columns from each table, forming relationships. +--- + +<head> + <title>Relational database: A way to get order out of data chaos</title> +</head> + +A relational database provides a structured way to store data into tables consisting of rows and columns. Different tables in a relational database can be joined together using common columns from each table, forming relationships. + +Analytics engineers use relational database models to process high volumes of data that, in its rawest form, is too difficult for an end user or analyst to read and comprehend. Thanks to these models, people can easily query, interpret, and derive insight out of data using accessible SQL. + +Anyone who’s ever managed or modeled data will tell you that data points are only meaningful in relation to each other. The very philosophy behind data management and data analytics has centered on forming a narrative out of seemingly disparate elements. + +At the heart of this notion sits the relational database, which was first introduced by computer scientist E.F. Codd in the year 1970 — 13 years before the internet was even invented! + +## How relational databases work + +The legwork behind relational databases lies in establishing pre-defined relationships between tables, also called “entities”. For example, in the [jaffle_shop](https://github.com/dbt-labs/jaffle_shop) ecommerce store database, where customers’ information is stored in a `customers` table and orders information is stored in an `orders` table, a relationship is defined such that each order is attributed to a customer. + +![A customers table related to an orders table through a shared customer id column](/img/docs/terms/relational-database/relation.png) + +The way relationships are defined is via primary keys and foreign keys. + +By definition, a <Term id="primary-key" /> is a column (or combination of columns, as a surrogate key) that identifies a unique record. There can be only one primary key per table, and the primary key should be unique and not null. + +On the other hand, a foreign key is a column (or combination of columns) in one table that references the primary key in another table. In the above example, multiple orders can belong to one customer. Assuming that `id` is defined as the primary key for the `customers` table, `user_id` in the `orders` table would be the foreign key. + +In analytics engineering, where the focus is geared towards data modeling and creating a reporting layer for a BI tool, relational databases are a great fit. Data modeling defines how the data elements are related to each other, and a well-organized database is the cornerstone of effective data querying. + +## Use cases for relational databases + +Relational databases are best for structured data that can be organized into tables made up of rows and columns. Data teams rely on relational databases for storing transactional data, and also when data querying and data analysis is needed. + +### Transactional processing + +As mentioned earlier, relational databases are a great fit for transaction-oriented systems such as CRM tools, e-commerce platforms, or finance software. Companies tend to use relational databases when transactional consistency is required, as they offer a near-failsafe environment for data accuracy and completion.
When a transaction consists of several steps, the system treats the steps as a single transaction and assures that the operation follows an ‘all-or-nothing’ scenario, i.e., the steps either all survive or all fail. + +### Modeling data and organizing it for analysis + +Relational databases support common data modeling techniques such as <Term id="dimensional-modeling" />, Data Vault, or sometimes hybrid approaches that combine different modeling techniques. Such methodologies allow teams to organize their data into useful data structures. + +A data model is the overarching conceptual layer that organizes data entities and their relationships. The specific physical implementation of that data model, including the definitions of data types and constraints, constitutes the database schema. + +Having organized data entities also helps analytics engineers and analysts build meaningful queries that derive data in a format and granularity that is otherwise not directly available in the base database. + +Most analytics engineers have to deal with both relational (typically structured data) and non-relational data (typically unstructured data) coming in from multiple sources. The data is then transformed until it ultimately gets modeled into data entities using relational modeling approaches. More on non-relational databases in the following section, but in a nutshell, structured data is data that can be easily stored in a relational database system, while unstructured data is composed of formats that cannot easily (or at all) be broken down into tabular data. Common examples of unstructured data include video files, PDFs, audio files, and social media posts. + +Another popular format is semi-structured data, which is inherently difficult to organize into rows and columns but contains semantic markup that makes it possible to extract the underlying information. Some examples include XML and <Term id="json" />. + +Relational data warehouses provide relational databases that are specifically optimized for analytical querying rather than transaction processing. Increasingly, data warehouses are providing better support for unstructured data, or data that cannot be stored in relational tables. + +Even when analytics engineers do not physically enforce relationships at the database level (many modern data warehouses allow for defining relational constraints but do not actually enforce them), they do follow a relational process. This process enables them to still organize the data into logical entities whenever possible, and to make sure that the data is not redundant and is easily queryable. + +## Relational database vs. non-relational database + +The main difference between a relational and non-relational database is in how they store information. Relational databases are well-suited for data that is structured and store values in tables, while non-relational databases store data in a non-tabular form called unstructured data. + +As datasets are becoming dramatically more complex and less structured, the format of the ingested data can sometimes be unpredictable, which makes the case for non-relational databases (also called NoSQL). + +NoSQL databases are also typically better suited for granular real-time monitoring. On the other hand, relational databases make it easier to look at transformed and aggregated data, making them a more appropriate fit for reporting and analytics.
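+To make the relational side of this comparison concrete, here is a minimal sketch of querying the pre-defined relationship from the jaffle_shop example above, joining on the primary key / foreign key pair (column names are illustrative):
+
+```sql
+-- Join the two entities on customers.id (primary key) and
+-- orders.user_id (foreign key), then aggregate to the customer grain.
+select
+    customers.id     as customer_id,
+    count(orders.id) as order_count
+from customers
+left join orders
+    on orders.user_id = customers.id
+group by customers.id;
+```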
+ +The below table summarizes the main differences between a relational and a non-relational database: + +| | Relational Database | Non-Relational Database | +|---|---|---| +| Data storage | Data is stored in tables. | Data is stored in document files, graph stores, key-value stores, or wide-column stores. | +| Data format | Data is structured. | Data is mainly unstructured. | +| Usage | Mainly used for recording transactions, data modeling, and data analysis. | Mainly used to ingest large volume real-time data streams. | +| Data integrity | The relationships and constraints defined help ensure higher data integrity. | Non-relational databases do not guarantee data integrity. | +| Scalability | Scalable, but often at a high price. | Highly scalable. | + +## Conclusion + +Relational databases store data in a systematic way, and support querying multiple tables together in order to generate business insights. + +Often starting off with unorganized and chaotic data, analytics engineers leverage relational databases to bring structure and consistency to their data. + +Relational databases also have a strong record of transactional consistency. While some companies are racing to embrace non-relational databases in order to handle the large volume of unstructured data, most of their workloads likely remain transactional and analytical in nature, which is why relational databases are very common. + +## Further reading + +- [Glossary: Primary key](/terms/primary-key) +- [Glossary: Data warehouse](/terms/data-warehouse) diff --git a/website/docs/terms/reverse-etl.md b/website/docs/terms/reverse-etl.md new file mode 100644 index 00000000000..522ab364174 --- /dev/null +++ b/website/docs/terms/reverse-etl.md @@ -0,0 +1,93 @@ +--- +id: reverse-etl +title: Reverse ETL +description: Reverse ETL is the process of getting your transformed data stored in your data warehouse to end business platforms, such as sales CRMs and ad platforms. +displayText: reverse ETL +hoverSnippet: Reverse ETL is the process of getting your transformed data stored in your data warehouse to end business platforms, such as sales CRMs and ad platforms. +--- + +<head> + <title>Reverse ETL, demystified: What it is in plain English</title> +</head> + +Reverse ETL is the process of getting your transformed data stored in your data warehouse to end business platforms, such as sales CRMs and ad platforms. Once in an end platform, that data is often used to drive meaningful business actions, such as creating custom audiences in ad platforms, personalizing email campaigns, or supplementing data in a sales CRM. You may also hear reverse ETL referred to as operational analytics or data activation. + +Reverse ETL efforts typically happen after data teams have set up their [modern data stack](https://www.getdbt.com/blog/future-of-the-modern-data-stack/) and ultimately have a consistent and automated way to extract, load, and transform data. Data teams are also often responsible for setting up the pipelines to send down data to business platforms, and business users are typically responsible for *using the data* once it gets to their end platform. + +Ultimately, reverse ETL is a way to put data where the work is already happening, support self-service efforts, and help business users derive real action out of their data. + +## How reverse ETL works + +In the reverse ETL process, transformed data is synced from a data warehouse to external tools in order to be leveraged by different business teams. + +![A diagram depicting how the reverse ETL process works.
It starts with data being extracted from data sources like email CRMs, Facebook Ad platforms, backend databases, and NetSuite. The raw data is then loaded into a data warehouse. After loading, the data is transformed and modeled. The modeled data is then loaded directly back into the tools that created the data, like email CRMs, Facebook Ad platforms, and others, so the insights are more accessible to business users.](/img/docs/terms/reverse-etl/reverse-etl-diagram.png) + +The power of reverse ETL comes from sending down *already transformed data* to business platforms. Raw data, while beautiful in its own way, typically lacks the structure, aggregations, and aliasing to be useful for end business users off the bat. After data teams transform data for business use in ELT pipelines, typically to expose in an end business intelligence (BI) tool, they can also send this cleaned and meaningful data to other platforms where business users can derive value using [reverse ETL tools](#reverse-etl-tools). + +Data teams can choose to write the additional transformations that may need to happen for end business tools in reverse ETL tools themselves or by creating [additional models in dbt](https://getdbt.com/open-source-data-culture/reverse-etl-playbook/). + +## Why use reverse ETL? + +There are a few reasons why your team may want to consider using reverse ETL: + +### Putting data where the work is happening + +While most data teams would love it if business users spent a significant portion of their time in their BI tool, that’s neither practical nor necessarily the most efficient use of their time. In the real world, many business users will spend some time in a BI tool, identify the data that could be useful in a platform they spend a significant amount of time in, and work with the data team to get that data where they need it. Users feel comfortable and confident in the systems they use every day—why not put the data in the places that allow them to thrive? + +### Manipulating data to fit end platform requirements + +Reverse ETL helps you to put the data your business users need *in the format their end tool expects*. Oftentimes, end platforms expect data fields to be named or cast in a certain way. Instead of business users having to manually input those values in the correct format, you can transform your data using a product like dbt or directly in a reverse ETL tool itself, and sync down that data in an automated way. + +### Supporting self-service efforts + +By sending down data-team-approved data in reverse ETL pipelines, your business users have the flexibility to use that data however they see fit. Soon, your business users will be making audiences, testing personalization efforts, and running their end platform like a well-oiled, data-powered machine. + + +## Reverse ETL use cases + +Just as there are almost endless opportunities with data, there are many different potential use cases for reverse ETL. We won’t go into every possible option, but we’ll cover some of the common use cases that exist for reverse ETL efforts. + +### Personalization + +Reverse ETL allows business users to access data that they normally would only have access to in a BI tool *in the platforms they use every day*. As a result, business users can now use this data to personalize how they create ads, send emails, and communicate with customers. + +Personalization was all the hype a few years ago, and now you rarely see an email come into your inbox without some sort of personalization in place.
Data teams using reverse ETL are able to pass down important customer information, such as location, customer lifetime value (CLV), tenure, and other fields that can be used to create personalized emails, establish appropriate messaging, and segment email flows. All we can say is that the possibilities for personalization powered by reverse ETL are endless. + +### Sophisticated paid marketing initiatives + +At the end of the day, businesses want to serve the right ads to the right people (and at the right cost). A common use case for reverse ETL is for teams to use their customer data to create audiences in ad platforms to either serve specific audiences or create lookalikes. While ad platforms have gotten increasingly sophisticated with their algorithms to identify high-value audiences, it rarely hurts to try supplementing those audiences with your own data to create sophisticated audiences or lookalikes. + +### Self-service analytics culture + +We hinted at it earlier, but reverse ETL efforts can be an effective way to promote a self-service analytics culture. When data teams put the data where business users need it, business users can confidently access it on their own, driving even faster insights and action. Instead of requesting a data pull from a data team member, they can find the data they need directly within the platform that they use. Reverse ETL allows business users to act on metrics that have already been built out and validated by data teams without creating ad-hoc requests. + +### “Real-time” data + +We would be remiss if we didn’t mention reverse ETL and the notion of “real-time” data. While you can have the debate over the meaningfulness and true value-add of real-time data another time, reverse ETL can be a mechanism to bring data to end business platforms in a more “real-time” way. + +Data teams can set up syncs in reverse ETL tools at higher cadences, allowing business users to have the data they need, faster. Obviously, there’s some cost-benefit analysis on how often you want to be loading data via [ETL tools](https://www.getdbt.com/analytics-engineering/etl-tools-a-love-letter/) and hitting your data warehouse, but reverse ETL can help move data into external tools at a quicker cadence if deemed necessary. + +All this to say: move with caution in the realm of “real-time”, understand your stakeholders’ wants and decision-making process for real-time data, and work towards a solution that’s both practical and impactful. + +## Reverse ETL tools + +Reverse ETL tools typically establish the connection between your data warehouse and end business tools, offer an interface to create additional transformations or audiences, and support automation of downstream syncs. Below are some examples of tools that support reverse ETL pipelines. + +| Tool | Description | Open source option? | +|:---:|:---:|:---:| +| Hightouch | A platform to sync data models and create custom audiences for downstream business platforms. | :x: | +| Census | Another reverse ETL tool that can sync data from your data warehouse to your go-to-market tools. | :x: | +| Rudderstack | Also a CDP (customer data platform), Rudderstack additionally supports pushing down data and audiences to external tools, such as ad platforms and email CRMs. | :white_check_mark: | +| Grouparoo | Grouparoo, part of Airbyte, is an open source framework to move data from data warehouses to different cloud-based tools.
| :white_check_mark: | + +## Conclusion + +Reverse ETL enables you to sync the transformed data stored in your data warehouse to the external platforms often used by marketing, sales, and product teams, letting you leverage your data in a whole new way. Reverse ETL pipelines can support personalization efforts and sophisticated paid marketing initiatives, and ultimately offer new ways to act on your data. In doing this, it creates a self-service analytics culture where stakeholders can receive the data they need, in the places they need it, in an automated way. + +## Further reading + +If you’re interested in learning more about reverse ETL and the impact it could have on your team, check out the following: + +- [How dbt Labs’s data team approaches reverse ETL](https://getdbt.com/open-source-data-culture/reverse-etl-playbook/) +- [The operational data warehouse in action: Reverse ETL, CDPs, and the future of data activation](https://www.getdbt.com/coalesce-2021/operational-data-warehouse-reverse-etl-cdp-data-activation/) +- [The analytics engineering guide: Operational analytics](https://www.getdbt.com/analytics-engineering/use-cases/operational-analytics/) diff --git a/website/docs/terms/subquery.md b/website/docs/terms/subquery.md index de94ea947f5..d7aecdd52cc 100644 --- a/website/docs/terms/subquery.md +++ b/website/docs/terms/subquery.md @@ -1,10 +1,14 @@ --- id: subquery title: Subquery in SQL +description: "A subquery is what the name suggests: a query within another query. The true inception of SQL. Subqueries are often used when you need to process data in several steps." displayText: subquery hoverSnippet: A subquery is a query within another query. Subqueries are often used when you need to process data in multiple steps. --- +<head> + <title>What is a Subquery in SQL and when are they useful? - dbt Labs</title> +</head> A subquery is what the name suggests: a query within another query. _The true inception of SQL_. Subqueries are often used when you need to process data in several steps. For the majority of subqueries you’ll see in actual practice, the inner query will execute first and pass its result to the outer query it's nested in. Subqueries are usually contrasted with Common Table Expressions (CTEs) as they have similar use cases. Unlike CTEs, which are usually separate `SELECT` statements within a query, subqueries are usually `SELECT` statements nested within a `JOIN`, `FROM`, or `WHERE` statement in a query. @@ -194,7 +198,7 @@ Again, choosing to use CTEs over subqueries is a personal choice. It may help to ## Data warehouse support for subqueries -Subqueries are likely to be supported across most, if not all, modern data warehouses. Please use this table to see more information about using subqueries in your specific data warehouse. +Subqueries are likely to be supported across most, if not all, modern data warehouses. Please use this table to see more information about using subqueries in your specific data warehouse. | Data warehouse | Supports subqueries? | |---|---| diff --git a/website/docs/terms/surrogate-key.md b/website/docs/terms/surrogate-key.md index 06e7d28eed6..718d3f53c92 100644 --- a/website/docs/terms/surrogate-key.md +++ b/website/docs/terms/surrogate-key.md @@ -1,10 +1,15 @@ --- id: surrogate-key title: Surrogate key +description: A surrogate key is a unique identifier derived from the data itself. It's commonly a hashed value of multiple columns that will create a unique id for each row.
displayText: surrogate key hoverSnippet: A surrogate key is a unique identifier derived from the data itself. It often takes the form of a hashed value of multiple columns that will create a uniqueness constraint for each row. --- + +<head> + <title>What is a surrogate key in a database table? - dbt Labs</title> +</head> + A surrogate key is a unique identifier derived from the data itself. It often takes the form of a hashed value of multiple columns that will create a uniqueness constraint for each row. You will need to create a surrogate key for every table that doesn't have a natural <Term id="primary-key" />. Why would you ever need to make a surrogate key? Shouldn’t all tables innately just have a field that uniquely identifies each row? Now that would be too easy… @@ -19,7 +24,7 @@ Primary keys can be established in two ways: naturally or derived through the data * A __surrogate key__ is a hashed value of multiple fields in a dataset that create a uniqueness constraint on that dataset. You’ll essentially need to make a surrogate key in every table that lacks a natural key. :::note Note -You may also hear about primary keys being a form of a _constraint_ on a database object. Column constraints are specified in the <Term id="ddl" /> to create or alter a database object. For data warehouses that support the enforcement of primary key constraints, this means that an error would be raised if a field's uniqueness or non-nullness was broken upon an `INSERT` or `UPDATE` statement. Most modern data warehouses don’t support _and_ enforce [primary key constraints](https://docs.getdbt.com/terms/primary-key#Data-warehouse-support-for-primary-keys), so it’s important to have [automated testing](https://docs.getdbt.com/blog/primary-key-testing#how-to-test-primary-keys-with-dbt) in-place to ensure your primary keys are unique and not null. +You may also hear about primary keys being a form of a _constraint_ on a database object. Column constraints are specified in the <Term id="ddl" /> to create or alter a database object. For data warehouses that support the enforcement of primary key constraints, this means that an error would be raised if a field's uniqueness or non-nullness was broken upon an `INSERT` or `UPDATE` statement. Most modern data warehouses don’t support _and_ enforce [primary key constraints](https://docs.getdbt.com/terms/primary-key#Data-warehouse-support-for-primary-keys), so it’s important to have [automated testing](https://docs.getdbt.com/blog/primary-key-testing#how-to-test-primary-keys-with-dbt) in place to ensure your primary keys are unique and not null. ::: ## How surrogate keys are created @@ -30,17 +35,17 @@ Let’s take this to an example. Below, there is a table you pull from an ad pla
[HTML table markup trimmed: this hunk re-indents the header cells of the example ad-platform table (calendar_date, ad_id, impressions, spend, clicks, conversions).] @@ -103,19 +108,19 @@ After executing this, the table would now have the `unique_id` field now uniquel [HTML table markup trimmed: the same header cells, re-indented, in the table that now includes the `unique_id` field.]
@@ -172,7 +177,7 @@ After executing this, the table would now have the `unique_id` field now uniquel Amazing, you just made a surrogate key! You can just move on to the next data model, right? No!! It’s critically important to test your surrogate keys for uniqueness and non-null values to ensure that the correct fields were chosen to create the surrogate key. -In order to test for null and unique values you can utilize code-based tests like [dbt tests](https://docs.getdbt.com/docs/building-a-dbt-project/tests), that can check fields for nullness and uniqueness. You can additionally utilize simple SQL queries or unit tests to check if surrogate key count and non-nullness is correct. +In order to test for null and unique values you can utilize code-based tests like [dbt tests](/docs/build/tests) that can check fields for nullness and uniqueness. You can additionally utilize simple SQL queries or unit tests to check that surrogate key counts and non-nullness are correct. ## A note on hashing algorithms diff --git a/website/docs/terms/table.md b/website/docs/terms/table.md index 0aeb9216a4e..08289a84c8e 100644 --- a/website/docs/terms/table.md +++ b/website/docs/terms/table.md @@ -2,13 +2,13 @@ id: table title: Table displayText: table -hoverSnippet: In simplest terms, a table is the direct storage of data on disk in rows and columns. Think excel sheet with raw values in each of the cells. +hoverSnippet: In simplest terms, a table is the direct storage of data in rows and columns. Think Excel sheet with raw values in each of the cells. --- :::important This page could use some love This term would benefit from additional depth and examples. Have knowledge to contribute? [Create a discussion in the docs.getdbt.com GitHub repository](https://github.com/dbt-labs/docs.getdbt.com/discussions) to begin the process of becoming a glossary contributor! ::: -In simplest terms, a table is the direct storage of data on disk in rows and columns. Think excel sheet with raw values in each of the cells. +In simplest terms, a table is the direct storage of data in rows and columns. Think Excel sheet with raw values in each of the cells. Here is an example of a table: @@ -18,7 +18,7 @@ Here is an example of a table: | 02 | Bilbo | Baggins | bilbo@theshire.co.uk | | 03 | Gandalf | The Grey | greywizard1@gmail.com | -Tables do use storage in your data warehouse. The data can be queried directly because you are directly pulling from the raw data itself. If a particular table was created by underlying data, the table will not be automatically updated. +Tables do use storage in your <Term id="data-warehouse" />. The data can be queried directly because you are directly pulling from the raw data itself. If a particular table was created from underlying data, the table will not be automatically updated when that data changes. This table definition applies to most data warehouses; however, there are different flavors of tables for different warehouses. For example, Snowflake has transient and temporary tables that support different features. diff --git a/website/docs/terms/view.md b/website/docs/terms/view.md index f0a5cd8c498..7357e8c60c8 100644 --- a/website/docs/terms/view.md +++ b/website/docs/terms/view.md @@ -8,7 +8,7 @@ hoverSnippet: A view (as opposed to a table) is a defined passthrough SQL query This term would benefit from additional depth and examples. Have knowledge to contribute?
[Create a discussion in the docs.getdbt.com GitHub repository](https://github.com/dbt-labs/docs.getdbt.com/discussions) to begin the process of becoming a glossary contributor! ::: -A view (as opposed to a <Term id="table" />) is a defined passthrough SQL query that can be run against a database (or data warehouse). A view doesn’t store data, like a table does, but it defines the logic that you need to fetch the underlying data. +A view (as opposed to a <Term id="table" />) is a defined passthrough SQL query that can be run against a database (or <Term id="data-warehouse" />). A view doesn’t store data, like a table does, but it defines the logic that you need to fetch the underlying data. For example, you might define a SQL view to count new users in a day: @@ -32,4 +32,4 @@ You shouldn’t expect a view in itself to be your final destination in terms of ## Further reading -- [Best practices guide on choosing table vs view materializations](docs/guides/best-practices#choose-your-materializations-wisely) +- [Best practices guide on choosing table vs view materializations](/guides/best-practices) diff --git a/website/docs/tutorial/building-your-first-project/build-your-first-models.md b/website/docs/tutorial/building-your-first-project/build-your-first-models.md deleted file mode 100644 index 839ba84608d..00000000000 --- a/website/docs/tutorial/building-your-first-project/build-your-first-models.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -title: Build your first models -id: build-your-first-models -description: "Now that you've set up the starter project, you can get to the fun part — building models!" ---- - -Now that you've set up your sample project and had a successful run, you can get to the fun part — [building models](building-models)! You will take a sample query and turn it into a model in your dbt project. A model in dbt is a select statement. - -## Check out a new git branch - -1. Make sure you're in the Develop interface. If you're not, click ![hamburger icon](/img/hamburger-icon.png), then click `Develop`. The master branch should now be set to read-only mode, so you'll need to create a new branch. - -2. Click **+ create new branch**, and name your branch `add-customers-model`. -
    - -
    - -## Build your first model - -1. If you're not already in the Develop interface, click ![hamburger icon](/img/hamburger-icon.png), and then `Develop`. -2. Click the `models` directory, then click ![kebob icon](/img/kebob-menu.png), then select **New file**. -3. Name the file `models/customers.sql`, then click **Create**. -4. Paste the following query into the file and click **save**. - - - -5. Enter `dbt run` in the command prompt at the bottom of the screen. You should get a successful run and see three models under DETAILS: -
    - -
- In the future, you would connect your business intelligence (BI) tools to these views and tables so they read cleaned-up data rather than raw data. - -#### FAQs - - - - - - - -## Change the way your model is materialized - - - -## Delete the example models - - - -## Build models on top of other models - - - -## Next steps - -Once you build your first model, you're ready to [test and document your project](tutorial/building-your-first-project/test-and-document-your-project). - - diff --git a/website/docs/tutorial/getting-set-up.md b/website/docs/tutorial/getting-set-up.md deleted file mode 100644 index 5e75dc14328..00000000000 --- a/website/docs/tutorial/getting-set-up.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -title: Getting set up in dbt Cloud -id: getting-set-up -description: "Set up a sample project to get you started using dbt Cloud." -sidebar_label: "Getting set up" ---- - -In this guide, you will learn how to set up dbt Cloud using a sample project and one of the most common data platforms. You can select from [BigQuery](/tutorial/getting-set-up/setting-up-bigquery), [Databricks](/tutorial/getting-set-up/setting-up-databricks), [Redshift](/tutorial/getting-set-up/setting-up-redshift), or [Snowflake](/tutorial/getting-set-up/setting-up-snowflake). If you're not sure, then try [BigQuery](/tutorial/getting-set-up/setting-up-bigquery). - -If you'd rather edit and run files locally using the dbt command line interface (dbt CLI) to connect to dbt Core, then you can refer to [Getting started using dbt Core](/tutorial/learning-more/getting-started-dbt-core). diff --git a/website/docs/tutorial/getting-started.md b/website/docs/tutorial/getting-started.md deleted file mode 100644 index 78ae5b4b6ef..00000000000 --- a/website/docs/tutorial/getting-started.md +++ /dev/null @@ -1,29 +0,0 @@ ---- -title: Getting started with dbt Cloud -id: getting-started -description: "Create your first dbt project using a SQL query." ---- - -dbt centralizes your analytics code and enables software engineering-style guardrails for data teams. dbt enables you to collaborate on SQL, version it through Git, and test and document your queries before safely deploying them to production and monitoring them. - -dbt takes your analytics code and helps you modularize it before you compile and run it against your database. This enables you and your team to collaborate on a single source of truth for metrics and insights. A single source of truth, combined with the ability to define tests for your data, reduces errors when your analytics code is updated due to changes in your business, and alerts you when a job fails. - -This guide will show you how to set up dbt Cloud and perform some key tasks. These tasks will illustrate how dbt establishes standard practices for your work. - -In this guide, you will: - -* Set up a warehouse with sample data -* Connect the warehouse to dbt Cloud -* Add a Git repository to dbt Cloud -* Execute a dbt transformation using `dbt run` -* Schedule a job or transformation - -If you want a more in-depth learning experience, we recommend taking the dbt Fundamentals course on our [dbt Learn online courses site](https://courses.getdbt.com/). - -## Before you begin - -Before you begin, you will need: - -* Warehouse with sample data. If you don't have this, you can use the [Big Query project](tutorial/getting-set-up/setting-up-bigquery), which leverages public data sets. -* Basic understanding of Git. -* Basic understanding of SQL.
diff --git a/website/docs/tutorial/learning-more.md b/website/docs/tutorial/learning-more.md deleted file mode 100644 index 0ee54c5e8c7..00000000000 --- a/website/docs/tutorial/learning-more.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -title: "Learning more" -id: learning-more -description: "Learn more dbt skills and make more impact using dbt." -sidebar_label: "Learning more" ---- - -After building your first models, testing and documenting your project, and scheduling a job, you can explore different ways to enhance your dbt skills. - -To learn more: - -* Connect to dbt Core using the command line interface -* Refactor legacy SQL -* Learn how to use Jinja diff --git a/website/docs/useful_components.md b/website/docs/useful_components.md deleted file mode 100644 index ba0223692c4..00000000000 --- a/website/docs/useful_components.md +++ /dev/null @@ -1,105 +0,0 @@ -## Tabs for configs on multiple resources - - - - - - - -```sql - -{{ config( - -) }} - -select ... - - -``` - - - - - -```yml -models: - [](resource-path): - - -``` - - - - - - - - - -```yml -sources: - [](resource-path): - - -``` - - - - - - - - - -```yml -seeds: - [](resource-path): - - -``` - - - - - - - - - -```sql -{% snapshot [snapshot_name](snapshot_name) %} - -{{ config( - -) }} - -select ... - -{% endsnapshot %} - -``` - - - - - -```yml -snapshots: - [](resource-path): - enabled: true | false - -``` - - - - - - diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index bd4e8509d72..1d062efa979 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -1,34 +1,36 @@ -const path = require('path'); -const { versions, versionedPages } = require('./dbt-versions'); -require('dotenv').config() +const path = require("path"); +const math = require("remark-math"); +const katex = require("rehype-katex"); +const { versions, versionedPages } = require("./dbt-versions"); +require("dotenv").config(); /* Debugging */ var SITE_URL; -if (!process.env.CONTEXT || process.env.CONTEXT == 'production') { - SITE_URL = 'https://docs.getdbt.com'; +if (!process.env.CONTEXT || process.env.CONTEXT == "production") { + SITE_URL = "https://docs.getdbt.com"; } else { SITE_URL = process.env.DEPLOY_URL; } var GIT_BRANCH; -if (!process.env.CONTEXT || process.env.CONTEXT == 'production') { - GIT_BRANCH = 'current'; +if (!process.env.CONTEXT || process.env.CONTEXT == "production") { + GIT_BRANCH = "current"; } else { GIT_BRANCH = process.env.HEAD; } let { ALGOLIA_APP_ID, ALGOLIA_API_KEY, ALGOLIA_INDEX_NAME } = process.env; -let metatags = [] +let metatags = []; // If Not Current Branch, do not index site -if(GIT_BRANCH !== 'current') { +if (GIT_BRANCH !== "current") { metatags.push({ - tagName: 'meta', + tagName: "meta", attributes: { - name: 'robots', - content: 'noindex' - } - }) + name: "robots", + content: "noindex", + }, + }); } console.log("DEBUG: CONTEXT =", process.env.CONTEXT); @@ -38,29 +40,41 @@ console.log("DEBUG: ALGOLIA_INDEX_NAME = ", ALGOLIA_INDEX_NAME); console.log("DEBUG: metatags = ", metatags); var siteSettings = { - baseUrl: '/', - favicon: '/img/favicon.ico', - tagline: 'End user documentation, guides and technical reference for dbt (data build tool)', - title: 'dbt Docs', + baseUrl: "/", + tagline: "End user documentation, guides and technical reference for dbt", + title: "dbt Developer Hub", url: SITE_URL, - onBrokenLinks: 'warn', - + onBrokenLinks: "warn", + onBrokenMarkdownLinks: "throw", + trailingSlash: false, themeConfig: { - image: '/img/avatar.png', + image: "/img/avatar.png", colorMode: 
{ - disableSwitch: true + defaultMode: "light", + disableSwitch: false, + respectPrefersColorScheme: true, }, // Adding non-empty strings for Algolia config // allows Docusaurus to run locally without .env file algolia: { - apiKey: ALGOLIA_API_KEY ? ALGOLIA_API_KEY : 'dbt', - indexName: ALGOLIA_INDEX_NAME ? ALGOLIA_INDEX_NAME : 'dbt', - appId: ALGOLIA_APP_ID ? ALGOLIA_APP_ID : 'dbt' + apiKey: ALGOLIA_API_KEY ? ALGOLIA_API_KEY : "dbt", + indexName: ALGOLIA_INDEX_NAME ? ALGOLIA_INDEX_NAME : "dbt", + appId: ALGOLIA_APP_ID ? ALGOLIA_APP_ID : "dbt", //debug: true, }, + announcementBar: { + id: "live_qa", + content: + "Take the 5-minute dbt Community Survey!", + backgroundColor: "#047377", + textColor: "#fff", + isCloseable: true, + }, + announcementBarActive: true, + announcementBarLink: "https://www.surveymonkey.com/r/XP2N8Z3", prism: { theme: (() => { - var theme = require('prism-react-renderer/themes/nightOwl'); + var theme = require("prism-react-renderer/themes/nightOwl"); // Add additional rule to nightowl theme in order to change // the color of YAML keys (to be different than values). // There weren't many Prism themes that differentiated @@ -71,183 +85,200 @@ var siteSettings = { style: { // color chosen from the nightowl theme palette // https://github.com/FormidableLabs/prism-react-renderer/blob/master/src/themes/nightOwl.js#L83 - color: "rgb(255, 203, 139)" - } + color: "rgb(255, 203, 139)", + }, }); - return theme + return theme; })(), - additionalLanguages: ['bash'], + additionalLanguages: ["bash"], }, navbar: { + hideOnScroll: true, logo: { - src: '/img/dbt-logo-light.svg', - alt: 'dbt Logo', + src: "/img/dbt-logo.svg", + srcDark: "img/dbt-logo-light.svg", + alt: "dbt Logo", }, items: [ { - to: '/docs/introduction', - label: 'Docs', - position: 'left', - activeBaseRegex: 'docs\/(?!(dbt-cloud))', - }, - { - to: '/reference/dbt_project.yml', - label: 'Reference', - position: 'left', - activeBasePath: 'reference' - }, - { - to: '/dbt-cli/cli-overview', - label: 'dbt CLI', - position: 'left', - activeBasePath: 'dbt-cli' - }, - { - to: '/docs/dbt-cloud/cloud-overview', - label: 'dbt Cloud', - position: 'left', - activeBasePath: 'docs/dbt-cloud' - }, - { - to: '/faqs/all', - label: 'FAQs', - position: 'left', - activeBasePath: 'faqs' + to: "/docs/introduction", + label: "Docs", + position: "left", + activeBaseRegex: "docs/(?!(dbt-cloud))", }, { - to: '/blog/', - label: 'Developer Blog', - position: 'right', - activeBasePath: 'blog' + to: "/reference/dbt_project.yml", + label: "Reference", + position: "left", + activeBasePath: "reference", }, { - label: 'Learn', - position: 'right', + label: "Learn", + position: "right", items: [ { - label: 'Getting started', - to: '/tutorial/getting-started', + label: 'Courses', + href: 'https://courses.getdbt.com', + }, + { + label: 'Guides', + to: '/guides/best-practices', }, { - label: 'Online courses', - href: 'https://courses.getdbt.com', + label: "Developer Blog", + to: "/blog", }, { - label: 'Live courses', - href: 'https://learn.getdbt.com/public', + label: "Glossary", + to: "/glossary", } ], }, { - label: 'Community', - position: 'right', + label: "Community", + position: "right", items: [ { - label: 'Maintaining a Slack Channel', - to: '/community/maintaining-a-channel', + label: "Join the Community", + to: "/community/join", }, { - label: 'dbt Slack', - href: 'https://community.getdbt.com/', + label: "Become a contributor", + to: "/community/contribute", }, { - label: 'Blog', - href: 'https://blog.getdbt.com', + label: "Community Forum", + 
to: "/community/forum", }, { - label: 'GitHub', - href: 'https://github.com/dbt-labs/dbt-core', + label: "Events", + to: "/community/events", }, - ] + ], + }, + { + label: "Create a free account", + to: "https://www.getdbt.com/signup/", + position: "right", + className: "nav-create-account button button--primary", }, ], }, footer: { - copyright: `Copyright © ${new Date().getFullYear()} dbt Labs™, Inc. All Rights Reserved. | Terms of Service | Privacy Policy | Security` + links: [ + { + html: ` + + + + `, + }, + ], + + copyright: `Copyright © ${new Date().getFullYear()} dbt Labs™, Inc. All Rights Reserved.`, }, }, presets: [ [ - '@docusaurus/preset-classic', + "@docusaurus/preset-classic", { theme: { - customCss: require.resolve('./src/css/custom.css'), + customCss: require.resolve("./src/css/custom.css"), }, docs: { - path: 'docs', - routeBasePath: '/', - sidebarPath: require.resolve('./sidebars.js'), + path: "docs", + routeBasePath: "/", + sidebarPath: require.resolve("./sidebars.js"), + remarkPlugins: [math], + rehypePlugins: [katex], - editUrl: 'https://github.com/dbt-labs/docs.getdbt.com/edit/' + GIT_BRANCH + '/website/', - showLastUpdateTime: false, + editUrl: + "https://github.com/dbt-labs/docs.getdbt.com/edit/" + + GIT_BRANCH + + "/website/", + showLastUpdateTime: true, //showLastUpdateAuthor: false, - sidebarCollapsible: true, + sidebarCollapsible: true, }, blog: { - blogTitle: 'dbt Developer Blog', - blogDescription: 'Technical tutorials from the dbt Community.', + blogTitle: "dbt Developer Blog", + blogDescription: "Technical tutorials from the dbt Community.", postsPerPage: 20, - blogSidebarTitle: 'Recent posts', + blogSidebarTitle: "Recent posts", blogSidebarCount: 5, + remarkPlugins: [math], + rehypePlugins: [katex], }, - }, ], ], plugins: [ - [ - path.resolve('plugins/insertMetaTags'), - { metatags } - ], - path.resolve('plugins/svg'), - path.resolve('plugins/customWebpackConfig'), - [ - path.resolve('plugins/buildGlobalData'), - { versionedPages } - ], - path.resolve('plugins/buildAuthorPages') + [path.resolve("plugins/insertMetaTags"), { metatags }], + path.resolve("plugins/svg"), + path.resolve("plugins/customWebpackConfig"), + [path.resolve("plugins/buildGlobalData"), { versionedPages }], + path.resolve("plugins/buildAuthorPages"), ], scripts: [ { - src: 'https://code.jquery.com/jquery-3.4.1.min.js', - defer: true + src: "https://code.jquery.com/jquery-3.4.1.min.js", + defer: true, }, { - src: 'https://cdn.jsdelivr.net/npm/featherlight@1.7.14/release/featherlight.min.js', - defer: true + src: "https://cdn.jsdelivr.net/npm/featherlight@1.7.14/release/featherlight.min.js", + defer: true, }, - '/js/gtm.js', - 'https://kit.fontawesome.com/7110474d41.js' + "/js/gtm.js", + "/js/onetrust.js", + "https://kit.fontawesome.com/7110474d41.js", ], stylesheets: [ - '/css/fonts.css', - '/css/entypo.css', - '/css/search.css', - '/css/api.css', - 'https://fonts.googleapis.com/css2?family=Source+Sans+Pro:wght@400;500;600;700&display=swap', - 'https://fonts.googleapis.com/css2?family=Source+Code+Pro:wght@400;500;600;700&display=swap' + "/css/fonts.css", + "/css/entypo.css", + "/css/search.css", + "/css/api.css", + "https://fonts.googleapis.com/css2?family=Source+Sans+Pro:wght@400;500;600;700&display=swap", + "https://fonts.googleapis.com/css2?family=Source+Code+Pro:wght@400;500;600;700&display=swap", + { + href: "https://cdn.jsdelivr.net/npm/katex@0.13.24/dist/katex.min.css", + type: "text/css", + integrity: + "sha384-odtC+0UGzzFL/6PNoE8rX/SPcQDXBJ+uRepguP4QkPCm2LBxH3FA3y+fKSiJ+AmM", + 
crossorigin: "anonymous", + }, + {rel: 'icon', href: '/img/favicon.png', type: 'image/png'}, + {rel: 'icon', href: '/img/favicon.svg', type: 'image/svg+xml'}, ], -} +}; // If versions json file found, add versions dropdown to nav -if(versions) { +if (versions) { siteSettings.themeConfig.navbar.items.push({ - label: 'Versions', - position: 'left', - className: 'nav-versioning', + label: "Versions", + position: "left", + className: "nav-versioning", items: [ ...versions.reduce((acc, version) => { - if(version?.version) { + if (version?.version) { acc.push({ label: `${version.version}`, - href: '#', - }) + href: "#", + }); } - return acc - }, []) - ] - },) + return acc; + }, []), + ], + }); } module.exports = siteSettings; diff --git a/website/functions/get-discourse-topics.js b/website/functions/get-discourse-topics.js new file mode 100644 index 00000000000..3c447604250 --- /dev/null +++ b/website/functions/get-discourse-topics.js @@ -0,0 +1,142 @@ +const axios = require('axios') + +async function getDiscourseTopics({ body }) { + const { DISCOURSE_API_KEY , DISCOURSE_USER } = process.env + + try { + // Set API endpoint and headers + let discourse_endpoint = `https://discourse.getdbt.com` + let headers = { + 'Accept': 'application/json', + 'Api-Key': DISCOURSE_API_KEY, + 'Api-Username': DISCOURSE_USER, + } + + const query = buildQueryString(body) + if(!query) throw new Error('Unable to build query string.') + + // Get topics from Discourse + let { data: { posts, topics } } = await axios.get(`${discourse_endpoint}/search?q=${query}`, { headers }) + + if(!topics || topics?.length <= 0) + throw new Error('Unable to get results from api request.') + + // Set author and like_count for topics if not querying by specific term + let allTopics = topics + if(!body?.term) { + allTopics = topics.reduce((topicsArr, topic) => { + // Get first post in topic + const firstTopicPost = posts?.find(post => + post?.post_number === 1 && + post?.topic_id === topic?.id + ) + // If post found + // Get username + if(firstTopicPost?.username) { + topic.author = firstTopicPost.username + } + // Get like count + if(firstTopicPost?.like_count) { + topic.like_count = firstTopicPost.like_count + } + + if(firstTopicPost?.blurb) { + topic.blurb = firstTopicPost.blurb + } + + // Push updated topic to array + topicsArr.push(topic) + + return topicsArr + }, []) + } + + // Return topics + return await returnResponse(200, allTopics) + } catch(err) { + // Log and return the error + console.log('err', err) + return await returnResponse(500, { error: 'Unable to get topics from Discourse.'}) + } +} + +async function returnResponse(status, res) { + const headers = { + 'Content-Type': 'application/json', + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Headers': '*', + 'Access-Control-Allow-Methods': 'POST, OPTIONS' + } + const resObj = { + statusCode: status, + headers, + body: JSON.stringify(res) + } + return resObj +} + +function buildQueryString(body) { + if(!body) return null + + // start with empty query string + let query = '' + + // check param and apply to query if set + for(const [key, value] of Object.entries(JSON.parse(body))) { + // validate categories + // if valid, add to query string + if(validateItem({ key, value })) { + if(key === 'category') { + query += `#${value} ` + } else if(key === 'inString') { + query += `in:${value}` + } else if(key === 'status' && Array.isArray(value)) { + value?.map(item => { + query += `${key}:${item} ` + }) + } else { + query += `${key}:${value} ` + } + } + } + + if(query) { + 
+    const encodedQuery = encodeURIComponent(query)
+    return encodedQuery
+  }
+}
+
+function validateItem({ key, value }) {
+  // predefined Discourse values
+  // https://docs.discourse.org/#tag/Search/operation/search
+  const inStringValues = ['title', 'first', 'pinned', 'wiki']
+  const orderValues = ['latest', 'likes', 'views', 'latest_topic']
+  const statusValues = ['open', 'closed', 'public', 'archived', 'noreplies', 'single_user', 'solved', 'unsolved']
+
+  // validate keys
+  if(key === 'inString') {
+    return inStringValues.includes(value)
+  } else if(key === 'order') {
+    return orderValues.includes(value)
+  } else if(key === 'status') {
+    if(Array.isArray(value)) {
+      // every status in the array must be a recognized value
+      return value.every(item => statusValues.includes(item))
+    } else {
+      return statusValues.includes(value)
+    }
+  } else {
+    return true
+  }
+}
+
+exports.handler = getDiscourseTopics
diff --git a/website/functions/post-preview.js b/website/functions/post-preview.js
new file mode 100644
index 00000000000..8d88c5f95cb
--- /dev/null
+++ b/website/functions/post-preview.js
@@ -0,0 +1,7 @@
+export default function createPostPreview(description, charCount) {
+  if (description.length <= charCount) { return description };
+  const clippedDesc = description.slice(0, charCount-1);
+  // return the version of the description clipped to the last instance of a space
+  // this is so there are no cut-off words.
+  return clippedDesc.slice(0, clippedDesc.lastIndexOf(" ")) + '...';
+}
diff --git a/website/package-lock.json b/website/package-lock.json
index a1b8e9c8d23..fc76d7c3fd9 100644
--- a/website/package-lock.json
+++ b/website/package-lock.json
@@ -12,16 +12,18 @@
         "@docusaurus/preset-classic": "2.0.0-beta.17",
         "@docusaurus/theme-search-algolia": "2.0.0-beta.17",
         "@mdx-js/react": "^1.6.21",
-        "@svgr/webpack": "^5.5.0",
+        "@svgr/webpack": "^6.0.0",
+        "axios": "^0.27.2",
         "classnames": "^2.3.1",
         "clsx": "^1.1.1",
         "color": "^3.1.2",
-        "core-js": "^3.15.2",
+        "core-js": "^3.20.0",
         "file-loader": "^6.2.0",
         "fs": "0.0.2",
         "gray-matter": "^4.0.3",
+        "hast-util-is-element": "^1.1.0",
         "js-yaml": "^4.1.0",
-        "mobx": "^6.3.7",
+        "mobx": "^6.3.9",
         "node-polyfill-webpack-plugin": "^1.1.4",
         "prism-react-renderer": "^1.2.1",
         "react": "^17.0.1",
@@ -29,8 +31,11 @@
         "react-is": "^18.1.0",
         "react-tooltip": "^4.2.21",
         "redoc": "^2.0.0-rc.57",
+        "rehype-katex": "^5.0.0",
+        "remark-math": "^3.0.1",
         "slugify": "^1.6.1",
         "styled-components": "5.3.3",
+        "swiper": "^8.4.3",
         "url-loader": "^4.1.1"
       },
       "devDependencies": {
@@ -40,6 +45,7 @@
         "@testing-library/react": "^12.1.3",
         "@testing-library/user-event": "^13.5.0",
         "css-loader": "^3.4.2",
+        "cypress": "^10.3.0",
         "dotenv": "^10.0.0",
         "jest": "^27.5.1",
         "jest-cli": "^27.5.1",
@@ -51,18 +57,26 @@
         "tty-browserify": "0.0.1"
       }
     },
+    "node_modules/@adobe/css-tools": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.0.1.tgz",
+      "integrity": "sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g==",
+      "dev": true
+    },
     "node_modules/@algolia/autocomplete-core": {
-      "version": "1.5.2",
-      "license": "MIT",
+      "version": "1.7.1",
+      "resolved": "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.7.1.tgz",
+      "integrity": "sha512-eiZw+fxMzNQn01S8dA/hcCpoWCOCwcIIEUtHHdzN5TGB3IpzLbuhqFeTfh2OUhhgkE8Uo17+wH+QJ/wYyQmmzg==",
      "dependencies": {
-        "@algolia/autocomplete-shared": "1.5.2"
+        "@algolia/autocomplete-shared":
"1.7.1" } }, "node_modules/@algolia/autocomplete-preset-algolia": { - "version": "1.5.2", - "license": "MIT", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.7.1.tgz", + "integrity": "sha512-pJwmIxeJCymU1M6cGujnaIYcY3QPOVYZOXhFkWVM7IxKzy272BwCvMFMyc5NpG/QmiObBxjo7myd060OeTNJXg==", "dependencies": { - "@algolia/autocomplete-shared": "1.5.2" + "@algolia/autocomplete-shared": "1.7.1" }, "peerDependencies": { "@algolia/client-search": "^4.9.1", @@ -70,117 +84,134 @@ } }, "node_modules/@algolia/autocomplete-shared": { - "version": "1.5.2", - "license": "MIT" + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.7.1.tgz", + "integrity": "sha512-eTmGVqY3GeyBTT8IWiB2K5EuURAqhnumfktAEoHxfDY2o7vg2rSnO16ZtIG0fMgt3py28Vwgq42/bVEuaQV7pg==" }, "node_modules/@algolia/cache-browser-local-storage": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.14.2.tgz", + "integrity": "sha512-FRweBkK/ywO+GKYfAWbrepewQsPTIEirhi1BdykX9mxvBPtGNKccYAxvGdDCumU1jL4r3cayio4psfzKMejBlA==", "dependencies": { - "@algolia/cache-common": "4.13.0" + "@algolia/cache-common": "4.14.2" } }, "node_modules/@algolia/cache-common": { - "version": "4.13.0", - "license": "MIT" + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.14.2.tgz", + "integrity": "sha512-SbvAlG9VqNanCErr44q6lEKD2qoK4XtFNx9Qn8FK26ePCI8I9yU7pYB+eM/cZdS9SzQCRJBbHUumVr4bsQ4uxg==" }, "node_modules/@algolia/cache-in-memory": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.14.2.tgz", + "integrity": "sha512-HrOukWoop9XB/VFojPv1R5SVXowgI56T9pmezd/djh2JnVN/vXswhXV51RKy4nCpqxyHt/aGFSq2qkDvj6KiuQ==", "dependencies": { - "@algolia/cache-common": "4.13.0" + "@algolia/cache-common": "4.14.2" } }, "node_modules/@algolia/client-account": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.14.2.tgz", + "integrity": "sha512-WHtriQqGyibbb/Rx71YY43T0cXqyelEU0lB2QMBRXvD2X0iyeGl4qMxocgEIcbHyK7uqE7hKgjT8aBrHqhgc1w==", "dependencies": { - "@algolia/client-common": "4.13.0", - "@algolia/client-search": "4.13.0", - "@algolia/transporter": "4.13.0" + "@algolia/client-common": "4.14.2", + "@algolia/client-search": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "node_modules/@algolia/client-analytics": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.14.2.tgz", + "integrity": "sha512-yBvBv2mw+HX5a+aeR0dkvUbFZsiC4FKSnfqk9rrfX+QrlNOKEhCG0tJzjiOggRW4EcNqRmaTULIYvIzQVL2KYQ==", "dependencies": { - "@algolia/client-common": "4.13.0", - "@algolia/client-search": "4.13.0", - "@algolia/requester-common": "4.13.0", - "@algolia/transporter": "4.13.0" + "@algolia/client-common": "4.14.2", + "@algolia/client-search": "4.14.2", + "@algolia/requester-common": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "node_modules/@algolia/client-common": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.14.2.tgz", + "integrity": 
"sha512-43o4fslNLcktgtDMVaT5XwlzsDPzlqvqesRi4MjQz2x4/Sxm7zYg5LRYFol1BIhG6EwxKvSUq8HcC/KxJu3J0Q==", "dependencies": { - "@algolia/requester-common": "4.13.0", - "@algolia/transporter": "4.13.0" + "@algolia/requester-common": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "node_modules/@algolia/client-personalization": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.14.2.tgz", + "integrity": "sha512-ACCoLi0cL8CBZ1W/2juehSltrw2iqsQBnfiu/Rbl9W2yE6o2ZUb97+sqN/jBqYNQBS+o0ekTMKNkQjHHAcEXNw==", "dependencies": { - "@algolia/client-common": "4.13.0", - "@algolia/requester-common": "4.13.0", - "@algolia/transporter": "4.13.0" + "@algolia/client-common": "4.14.2", + "@algolia/requester-common": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "node_modules/@algolia/client-search": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.14.2.tgz", + "integrity": "sha512-L5zScdOmcZ6NGiVbLKTvP02UbxZ0njd5Vq9nJAmPFtjffUSOGEp11BmD2oMJ5QvARgx2XbX4KzTTNS5ECYIMWw==", "dependencies": { - "@algolia/client-common": "4.13.0", - "@algolia/requester-common": "4.13.0", - "@algolia/transporter": "4.13.0" + "@algolia/client-common": "4.14.2", + "@algolia/requester-common": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "node_modules/@algolia/events": { "version": "4.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@algolia/events/-/events-4.0.1.tgz", + "integrity": "sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==" }, "node_modules/@algolia/logger-common": { - "version": "4.13.0", - "license": "MIT" + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.14.2.tgz", + "integrity": "sha512-/JGlYvdV++IcMHBnVFsqEisTiOeEr6cUJtpjz8zc0A9c31JrtLm318Njc72p14Pnkw3A/5lHHh+QxpJ6WFTmsA==" }, "node_modules/@algolia/logger-console": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.14.2.tgz", + "integrity": "sha512-8S2PlpdshbkwlLCSAB5f8c91xyc84VM9Ar9EdfE9UmX+NrKNYnWR1maXXVDQQoto07G1Ol/tYFnFVhUZq0xV/g==", "dependencies": { - "@algolia/logger-common": "4.13.0" + "@algolia/logger-common": "4.14.2" } }, "node_modules/@algolia/requester-browser-xhr": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.14.2.tgz", + "integrity": "sha512-CEh//xYz/WfxHFh7pcMjQNWgpl4wFB85lUMRyVwaDPibNzQRVcV33YS+63fShFWc2+42YEipFGH2iPzlpszmDw==", "dependencies": { - "@algolia/requester-common": "4.13.0" + "@algolia/requester-common": "4.14.2" } }, "node_modules/@algolia/requester-common": { - "version": "4.13.0", - "license": "MIT" + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.14.2.tgz", + "integrity": "sha512-73YQsBOKa5fvVV3My7iZHu1sUqmjjfs9TteFWwPwDmnad7T0VTCopttcsM3OjLxZFtBnX61Xxl2T2gmG2O4ehg==" }, "node_modules/@algolia/requester-node-http": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.14.2.tgz", + "integrity": "sha512-oDbb02kd1o5GTEld4pETlPZLY0e+gOSWjWMJHWTgDXbv9rm/o2cF7japO6Vj1ENnrqWvLBmW1OzV9g6FUFhFXg==", "dependencies": { - 
"@algolia/requester-common": "4.13.0" + "@algolia/requester-common": "4.14.2" } }, "node_modules/@algolia/transporter": { - "version": "4.13.0", - "license": "MIT", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.14.2.tgz", + "integrity": "sha512-t89dfQb2T9MFQHidjHcfhh6iGMNwvuKUvojAj+JsrHAGbuSy7yE4BylhLX6R0Q1xYRoC4Vvv+O5qIw/LdnQfsQ==", "dependencies": { - "@algolia/cache-common": "4.13.0", - "@algolia/logger-common": "4.13.0", - "@algolia/requester-common": "4.13.0" + "@algolia/cache-common": "4.14.2", + "@algolia/logger-common": "4.14.2", + "@algolia/requester-common": "4.14.2" } }, "node_modules/@ampproject/remapping": { "version": "2.2.0", - "license": "Apache-2.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", + "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", "dependencies": { "@jridgewell/gen-mapping": "^0.1.0", "@jridgewell/trace-mapping": "^0.3.9" @@ -190,36 +221,39 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", + "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", "dependencies": { - "@babel/highlight": "^7.16.7" + "@babel/highlight": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/compat-data": { - "version": "7.17.10", - "license": "MIT", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.19.4.tgz", + "integrity": "sha512-CHIGpJcUQ5lU9KrPHTjBMhVwQG6CQjxfg36fGXl3qk/Gik1WwWachaXFuo0uCWJT/mStOKtcbFJCaVLihC1CMw==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.17.10", - "license": "MIT", + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.19.3.tgz", + "integrity": "sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==", "dependencies": { "@ampproject/remapping": "^2.1.0", - "@babel/code-frame": "^7.16.7", - "@babel/generator": "^7.17.10", - "@babel/helper-compilation-targets": "^7.17.10", - "@babel/helper-module-transforms": "^7.17.7", - "@babel/helpers": "^7.17.9", - "@babel/parser": "^7.17.10", - "@babel/template": "^7.16.7", - "@babel/traverse": "^7.17.10", - "@babel/types": "^7.17.10", + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.19.3", + "@babel/helper-compilation-targets": "^7.19.3", + "@babel/helper-module-transforms": "^7.19.0", + "@babel/helpers": "^7.19.0", + "@babel/parser": "^7.19.3", + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.3", + "@babel/types": "^7.19.3", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -236,51 +270,69 @@ }, "node_modules/@babel/core/node_modules/semver": { "version": "6.3.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/generator": { - "version": "7.17.10", - "license": "MIT", + "version": "7.19.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.19.5.tgz", + "integrity": "sha512-DxbNz9Lz4aMZ99qPpO1raTbcrI1ZeYh+9NR9qhfkQIbFtVEqotHojEBxHzmxhVONkGt6VyrqVQcgpefMy9pqcg==", "dependencies": { - "@babel/types": 
"^7.17.10", - "@jridgewell/gen-mapping": "^0.1.0", + "@babel/types": "^7.19.4", + "@jridgewell/gen-mapping": "^0.3.2", "jsesc": "^2.5.1" }, "engines": { "node": ">=6.9.0" } }, + "node_modules/@babel/generator/node_modules/@jridgewell/gen-mapping": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", + "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz", + "integrity": "sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==", "dependencies": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz", + "integrity": "sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw==", "dependencies": { - "@babel/helper-explode-assignable-expression": "^7.16.7", - "@babel/types": "^7.16.7" + "@babel/helper-explode-assignable-expression": "^7.18.6", + "@babel/types": "^7.18.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.17.10", - "license": "MIT", + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz", + "integrity": "sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==", "dependencies": { - "@babel/compat-data": "^7.17.10", - "@babel/helper-validator-option": "^7.16.7", - "browserslist": "^4.20.2", + "@babel/compat-data": "^7.19.3", + "@babel/helper-validator-option": "^7.18.6", + "browserslist": "^4.21.3", "semver": "^6.3.0" }, "engines": { @@ -292,22 +344,24 @@ }, "node_modules/@babel/helper-compilation-targets/node_modules/semver": { "version": "6.3.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.17.9", - "license": "MIT", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.19.0.tgz", + "integrity": "sha512-NRz8DwF4jT3UfrmUoZjd0Uph9HQnP30t7Ash+weACcyNkiYTywpIjDBgReJMKgr+n86sn2nPVVmJ28Dm053Kqw==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-environment-visitor": "^7.16.7", - "@babel/helper-function-name": "^7.17.9", - "@babel/helper-member-expression-to-functions": "^7.17.7", - "@babel/helper-optimise-call-expression": "^7.16.7", - "@babel/helper-replace-supers": "^7.16.7", - "@babel/helper-split-export-declaration": "^7.16.7" + "@babel/helper-annotate-as-pure": 
"^7.18.6", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-function-name": "^7.19.0", + "@babel/helper-member-expression-to-functions": "^7.18.9", + "@babel/helper-optimise-call-expression": "^7.18.6", + "@babel/helper-replace-supers": "^7.18.9", + "@babel/helper-split-export-declaration": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -317,11 +371,12 @@ } }, "node_modules/@babel/helper-create-regexp-features-plugin": { - "version": "7.17.0", - "license": "MIT", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.19.0.tgz", + "integrity": "sha512-htnV+mHX32DF81amCDrwIDr8nrp1PTm+3wfBN9/v8QJOLEioOCOG7qNyq0nHeFiWbT3Eb7gsPwEmV64UCQ1jzw==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "regexpu-core": "^5.0.1" + "@babel/helper-annotate-as-pure": "^7.18.6", + "regexpu-core": "^5.1.0" }, "engines": { "node": ">=6.9.0" @@ -331,13 +386,12 @@ } }, "node_modules/@babel/helper-define-polyfill-provider": { - "version": "0.3.1", - "license": "MIT", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz", + "integrity": "sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww==", "dependencies": { - "@babel/helper-compilation-targets": "^7.13.0", - "@babel/helper-module-imports": "^7.12.13", - "@babel/helper-plugin-utils": "^7.13.0", - "@babel/traverse": "^7.13.0", + "@babel/helper-compilation-targets": "^7.17.7", + "@babel/helper-plugin-utils": "^7.16.7", "debug": "^4.1.1", "lodash.debounce": "^4.0.8", "resolve": "^1.14.2", @@ -349,206 +403,235 @@ }, "node_modules/@babel/helper-define-polyfill-provider/node_modules/semver": { "version": "6.3.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/helper-environment-visitor": { - "version": "7.16.7", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.16.7" - }, + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", + "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-explode-assignable-expression": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz", + "integrity": "sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==", "dependencies": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-function-name": { - "version": "7.17.9", - "license": "MIT", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz", + "integrity": "sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==", "dependencies": { - "@babel/template": "^7.16.7", - "@babel/types": "^7.17.0" + "@babel/template": "^7.18.10", + "@babel/types": "^7.19.0" }, "engines": { "node": ">=6.9.0" } }, 
"node_modules/@babel/helper-hoist-variables": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", + "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", "dependencies": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.17.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.9.tgz", + "integrity": "sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg==", "dependencies": { - "@babel/types": "^7.17.0" + "@babel/types": "^7.18.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-imports": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", + "integrity": "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", "dependencies": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.17.7", - "license": "MIT", - "dependencies": { - "@babel/helper-environment-visitor": "^7.16.7", - "@babel/helper-module-imports": "^7.16.7", - "@babel/helper-simple-access": "^7.17.7", - "@babel/helper-split-export-declaration": "^7.16.7", - "@babel/helper-validator-identifier": "^7.16.7", - "@babel/template": "^7.16.7", - "@babel/traverse": "^7.17.3", - "@babel/types": "^7.17.0" + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.19.0.tgz", + "integrity": "sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==", + "dependencies": { + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-simple-access": "^7.18.6", + "@babel/helper-split-export-declaration": "^7.18.6", + "@babel/helper-validator-identifier": "^7.18.6", + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.0", + "@babel/types": "^7.19.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz", + "integrity": "sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==", "dependencies": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.16.7", - "license": "MIT", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.19.0.tgz", + "integrity": "sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-remap-async-to-generator": { - "version": "7.16.8", - "license": "MIT", + "version": "7.18.9", + "resolved": 
"https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz", + "integrity": "sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-wrap-function": "^7.16.8", - "@babel/types": "^7.16.8" + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-wrap-function": "^7.18.9", + "@babel/types": "^7.18.9" }, "engines": { "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-replace-supers": { - "version": "7.16.7", - "license": "MIT", + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.19.1.tgz", + "integrity": "sha512-T7ahH7wV0Hfs46SFh5Jz3s0B6+o8g3c+7TMxu7xKfmHikg7EAZ3I2Qk9LFhjxXq8sL7UkP5JflezNwoZa8WvWw==", "dependencies": { - "@babel/helper-environment-visitor": "^7.16.7", - "@babel/helper-member-expression-to-functions": "^7.16.7", - "@babel/helper-optimise-call-expression": "^7.16.7", - "@babel/traverse": "^7.16.7", - "@babel/types": "^7.16.7" + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-member-expression-to-functions": "^7.18.9", + "@babel/helper-optimise-call-expression": "^7.18.6", + "@babel/traverse": "^7.19.1", + "@babel/types": "^7.19.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-simple-access": { - "version": "7.17.7", - "license": "MIT", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.19.4.tgz", + "integrity": "sha512-f9Xq6WqBFqaDfbCzn2w85hwklswz5qsKlh7f08w4Y9yhJHpnNC0QemtSkK5YyOY8kPGvyiwdzZksGUhnGdaUIg==", "dependencies": { - "@babel/types": "^7.17.0" + "@babel/types": "^7.19.4" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.16.0", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.9.tgz", + "integrity": "sha512-imytd2gHi3cJPsybLRbmFrF7u5BIEuI2cNheyKi3/iOBC63kNn3q8Crn2xVuESli0aM4KYsyEqKyS7lFL8YVtw==", "dependencies": { - "@babel/types": "^7.16.0" + "@babel/types": "^7.18.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-split-export-declaration": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", + "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", "dependencies": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" }, "engines": { "node": ">=6.9.0" } }, + "node_modules/@babel/helper-string-parser": { + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz", + "integrity": "sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==", + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.16.7", - "license": "MIT", + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", + "integrity": 
"sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz", + "integrity": "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-wrap-function": { - "version": "7.16.8", - "license": "MIT", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.19.0.tgz", + "integrity": "sha512-txX8aN8CZyYGTwcLhlk87KRqncAzhh5TpQamZUa0/u3an36NtDpUP6bQgBCBcLeBs09R/OwQu3OjK0k/HwfNDg==", "dependencies": { - "@babel/helper-function-name": "^7.16.7", - "@babel/template": "^7.16.7", - "@babel/traverse": "^7.16.8", - "@babel/types": "^7.16.8" + "@babel/helper-function-name": "^7.19.0", + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.0", + "@babel/types": "^7.19.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.17.9", - "license": "MIT", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.19.4.tgz", + "integrity": "sha512-G+z3aOx2nfDHwX/kyVii5fJq+bgscg89/dJNWpYeKeBv3v9xX8EIabmx1k6u9LS04H7nROFVRVK+e3k0VHp+sw==", "dependencies": { - "@babel/template": "^7.16.7", - "@babel/traverse": "^7.17.9", - "@babel/types": "^7.17.0" + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.4", + "@babel/types": "^7.19.4" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.17.9", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", + "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", "dependencies": { - "@babel/helper-validator-identifier": "^7.16.7", + "@babel/helper-validator-identifier": "^7.18.6", "chalk": "^2.0.0", "js-tokens": "^4.0.0" }, @@ -557,8 +640,9 @@ } }, "node_modules/@babel/parser": { - "version": "7.17.10", - "license": "MIT", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.19.4.tgz", + "integrity": "sha512-qpVT7gtuOLjWeDTKLkJ6sryqLliBaFpAtGeqw5cs5giLldvh+Ch0plqnUMKoVAUS6ZEueQQiZV+p5pxtPitEsA==", "bin": { "parser": "bin/babel-parser.js" }, @@ -567,10 +651,11 @@ } }, "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz", + "integrity": "sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -580,12 +665,13 @@ } }, "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.9.tgz", + "integrity": 
"sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0", - "@babel/plugin-proposal-optional-chaining": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.18.9", + "@babel/plugin-proposal-optional-chaining": "^7.18.9" }, "engines": { "node": ">=6.9.0" @@ -595,11 +681,13 @@ } }, "node_modules/@babel/plugin-proposal-async-generator-functions": { - "version": "7.16.8", - "license": "MIT", + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.19.1.tgz", + "integrity": "sha512-0yu8vNATgLy4ivqMNBIwb1HebCelqN7YX8SL3FDXORv/RqT0zEEWUCH4GH44JsSrvCu6GqnAdR5EBFAPeNBB4Q==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-remap-async-to-generator": "^7.16.8", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/helper-remap-async-to-generator": "^7.18.9", "@babel/plugin-syntax-async-generators": "^7.8.4" }, "engines": { @@ -610,11 +698,12 @@ } }, "node_modules/@babel/plugin-proposal-class-properties": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz", + "integrity": "sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==", "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -624,11 +713,12 @@ } }, "node_modules/@babel/plugin-proposal-class-static-block": { - "version": "7.17.6", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz", + "integrity": "sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw==", "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.17.6", - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-class-static-block": "^7.14.5" }, "engines": { @@ -639,10 +729,11 @@ } }, "node_modules/@babel/plugin-proposal-dynamic-import": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz", + "integrity": "sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-dynamic-import": "^7.8.3" }, "engines": { @@ -653,10 +744,11 @@ } }, "node_modules/@babel/plugin-proposal-export-namespace-from": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz", + "integrity": "sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA==", 
"dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.9", "@babel/plugin-syntax-export-namespace-from": "^7.8.3" }, "engines": { @@ -667,10 +759,11 @@ } }, "node_modules/@babel/plugin-proposal-json-strings": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz", + "integrity": "sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-json-strings": "^7.8.3" }, "engines": { @@ -681,10 +774,11 @@ } }, "node_modules/@babel/plugin-proposal-logical-assignment-operators": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.9.tgz", + "integrity": "sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.9", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" }, "engines": { @@ -695,10 +789,11 @@ } }, "node_modules/@babel/plugin-proposal-nullish-coalescing-operator": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz", + "integrity": "sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" }, "engines": { @@ -709,10 +804,11 @@ } }, "node_modules/@babel/plugin-proposal-numeric-separator": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz", + "integrity": "sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-numeric-separator": "^7.10.4" }, "engines": { @@ -723,14 +819,15 @@ } }, "node_modules/@babel/plugin-proposal-object-rest-spread": { - "version": "7.17.3", - "license": "MIT", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.19.4.tgz", + "integrity": "sha512-wHmj6LDxVDnL+3WhXteUBaoM1aVILZODAUjg11kHqG4cOlfgMQGxw6aCgvrXrmaJR3Bn14oZhImyCPZzRpC93Q==", "dependencies": { - "@babel/compat-data": "^7.17.0", - "@babel/helper-compilation-targets": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/compat-data": "^7.19.4", + "@babel/helper-compilation-targets": "^7.19.3", + "@babel/helper-plugin-utils": "^7.19.0", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - "@babel/plugin-transform-parameters": "^7.16.7" + "@babel/plugin-transform-parameters": "^7.18.8" }, "engines": { "node": ">=6.9.0" @@ -740,10 +837,11 @@ } }, "node_modules/@babel/plugin-proposal-optional-catch-binding": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz", + "integrity": "sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" }, "engines": { @@ -754,11 +852,12 @@ } }, "node_modules/@babel/plugin-proposal-optional-chaining": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.9.tgz", + "integrity": "sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0", + "@babel/helper-plugin-utils": "^7.18.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.18.9", "@babel/plugin-syntax-optional-chaining": "^7.8.3" }, "engines": { @@ -769,11 +868,12 @@ } }, "node_modules/@babel/plugin-proposal-private-methods": { - "version": "7.16.11", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz", + "integrity": "sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==", "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.16.10", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -783,12 +883,13 @@ } }, "node_modules/@babel/plugin-proposal-private-property-in-object": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz", + "integrity": "sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-create-class-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-private-property-in-object": "^7.14.5" }, "engines": { @@ -799,11 +900,12 @@ } }, "node_modules/@babel/plugin-proposal-unicode-property-regex": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz", + "integrity": "sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=4" @@ -814,7 +916,8 @@ }, "node_modules/@babel/plugin-syntax-async-generators": { "version": "7.8.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": 
"sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -824,8 +927,9 @@ }, "node_modules/@babel/plugin-syntax-bigint": { "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", "dev": true, - "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -835,7 +939,8 @@ }, "node_modules/@babel/plugin-syntax-class-properties": { "version": "7.12.13", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", "dependencies": { "@babel/helper-plugin-utils": "^7.12.13" }, @@ -845,7 +950,8 @@ }, "node_modules/@babel/plugin-syntax-class-static-block": { "version": "7.14.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, @@ -858,7 +964,8 @@ }, "node_modules/@babel/plugin-syntax-dynamic-import": { "version": "7.8.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -868,7 +975,8 @@ }, "node_modules/@babel/plugin-syntax-export-namespace-from": { "version": "7.8.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", + "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.3" }, @@ -876,10 +984,25 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-syntax-import-assertions": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz", + "integrity": "sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/plugin-syntax-import-meta": { "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", "dev": true, - "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, @@ -889,7 +1012,8 @@ }, "node_modules/@babel/plugin-syntax-json-strings": { "version": "7.8.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", 
"dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -898,10 +1022,11 @@ } }, "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz", + "integrity": "sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -912,7 +1037,8 @@ }, "node_modules/@babel/plugin-syntax-logical-assignment-operators": { "version": "7.10.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, @@ -922,7 +1048,8 @@ }, "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { "version": "7.8.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -932,7 +1059,8 @@ }, "node_modules/@babel/plugin-syntax-numeric-separator": { "version": "7.10.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, @@ -942,7 +1070,8 @@ }, "node_modules/@babel/plugin-syntax-object-rest-spread": { "version": "7.8.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -952,7 +1081,8 @@ }, "node_modules/@babel/plugin-syntax-optional-catch-binding": { "version": "7.8.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -962,7 +1092,8 @@ }, "node_modules/@babel/plugin-syntax-optional-chaining": { "version": "7.8.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -972,7 +1103,8 @@ }, "node_modules/@babel/plugin-syntax-private-property-in-object": { "version": "7.14.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": 
"sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, @@ -985,7 +1117,8 @@ }, "node_modules/@babel/plugin-syntax-top-level-await": { "version": "7.14.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, @@ -997,10 +1130,11 @@ } }, "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.17.10", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz", + "integrity": "sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1010,10 +1144,11 @@ } }, "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz", + "integrity": "sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1023,12 +1158,13 @@ } }, "node_modules/@babel/plugin-transform-async-to-generator": { - "version": "7.16.8", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz", + "integrity": "sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag==", "dependencies": { - "@babel/helper-module-imports": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-remap-async-to-generator": "^7.16.8" + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6", + "@babel/helper-remap-async-to-generator": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1038,10 +1174,11 @@ } }, "node_modules/@babel/plugin-transform-block-scoped-functions": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz", + "integrity": "sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1051,10 +1188,11 @@ } }, "node_modules/@babel/plugin-transform-block-scoping": { - "version": "7.16.7", - "license": "MIT", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.19.4.tgz", + "integrity": "sha512-934S2VLLlt2hRJwPf4MczaOr4hYF0z+VKPwqTNxyKX7NthTiPfhuKFWQZHXRM0vh/wo/VyXB3s4bZUNA08l+tQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.19.0" }, "engines": { "node": ">=6.9.0" @@ -1064,16 +1202,18 @@ } }, "node_modules/@babel/plugin-transform-classes": { - "version": "7.16.7", - 
"license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-environment-visitor": "^7.16.7", - "@babel/helper-function-name": "^7.16.7", - "@babel/helper-optimise-call-expression": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-replace-supers": "^7.16.7", - "@babel/helper-split-export-declaration": "^7.16.7", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.19.0.tgz", + "integrity": "sha512-YfeEE9kCjqTS9IitkgfJuxjcEtLUHMqa8yUJ6zdz8vR7hKuo6mOy2C05P0F1tdMmDCeuyidKnlrw/iTppHcr2A==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-compilation-targets": "^7.19.0", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-function-name": "^7.19.0", + "@babel/helper-optimise-call-expression": "^7.18.6", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/helper-replace-supers": "^7.18.9", + "@babel/helper-split-export-declaration": "^7.18.6", "globals": "^11.1.0" }, "engines": { @@ -1084,10 +1224,11 @@ } }, "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.9.tgz", + "integrity": "sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" }, "engines": { "node": ">=6.9.0" @@ -1097,10 +1238,11 @@ } }, "node_modules/@babel/plugin-transform-destructuring": { - "version": "7.17.7", - "license": "MIT", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.19.4.tgz", + "integrity": "sha512-t0j0Hgidqf0aM86dF8U+vXYReUgJnlv4bZLsyoPnwZNrGY+7/38o8YjaELrvHeVfTZao15kjR0PVv0nju2iduA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.19.0" }, "engines": { "node": ">=6.9.0" @@ -1110,11 +1252,12 @@ } }, "node_modules/@babel/plugin-transform-dotall-regex": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz", + "integrity": "sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1124,10 +1267,11 @@ } }, "node_modules/@babel/plugin-transform-duplicate-keys": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz", + "integrity": "sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" }, "engines": { "node": ">=6.9.0" @@ -1137,11 +1281,12 @@ } }, "node_modules/@babel/plugin-transform-exponentiation-operator": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz", + "integrity": "sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==", "dependencies": { - "@babel/helper-builder-binary-assignment-operator-visitor": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1151,10 +1296,11 @@ } }, "node_modules/@babel/plugin-transform-for-of": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz", + "integrity": "sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1164,12 +1310,13 @@ } }, "node_modules/@babel/plugin-transform-function-name": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz", + "integrity": "sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ==", "dependencies": { - "@babel/helper-compilation-targets": "^7.16.7", - "@babel/helper-function-name": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-compilation-targets": "^7.18.9", + "@babel/helper-function-name": "^7.18.9", + "@babel/helper-plugin-utils": "^7.18.9" }, "engines": { "node": ">=6.9.0" @@ -1179,10 +1326,11 @@ } }, "node_modules/@babel/plugin-transform-literals": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz", + "integrity": "sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" }, "engines": { "node": ">=6.9.0" @@ -1192,10 +1340,11 @@ } }, "node_modules/@babel/plugin-transform-member-expression-literals": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz", + "integrity": "sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1205,11 +1354,12 @@ } }, "node_modules/@babel/plugin-transform-modules-amd": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz", + "integrity": "sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg==", "dependencies": { - "@babel/helper-module-transforms": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-module-transforms": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" }, "engines": { @@ -1221,18 +1371,20 @@ }, 
"node_modules/@babel/plugin-transform-modules-amd/node_modules/babel-plugin-dynamic-import-node": { "version": "2.3.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", + "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", "dependencies": { "object.assign": "^4.1.0" } }, "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.17.9", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz", + "integrity": "sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q==", "dependencies": { - "@babel/helper-module-transforms": "^7.17.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-simple-access": "^7.17.7", + "@babel/helper-module-transforms": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6", + "@babel/helper-simple-access": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" }, "engines": { @@ -1244,19 +1396,21 @@ }, "node_modules/@babel/plugin-transform-modules-commonjs/node_modules/babel-plugin-dynamic-import-node": { "version": "2.3.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", + "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", "dependencies": { "object.assign": "^4.1.0" } }, "node_modules/@babel/plugin-transform-modules-systemjs": { - "version": "7.17.8", - "license": "MIT", - "dependencies": { - "@babel/helper-hoist-variables": "^7.16.7", - "@babel/helper-module-transforms": "^7.17.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-validator-identifier": "^7.16.7", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.19.0.tgz", + "integrity": "sha512-x9aiR0WXAWmOWsqcsnrzGR+ieaTMVyGyffPVA7F8cXAGt/UxefYv6uSHZLkAFChN5M5Iy1+wjE+xJuPt22H39A==", + "dependencies": { + "@babel/helper-hoist-variables": "^7.18.6", + "@babel/helper-module-transforms": "^7.19.0", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/helper-validator-identifier": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" }, "engines": { @@ -1268,17 +1422,19 @@ }, "node_modules/@babel/plugin-transform-modules-systemjs/node_modules/babel-plugin-dynamic-import-node": { "version": "2.3.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", + "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", "dependencies": { "object.assign": "^4.1.0" } }, "node_modules/@babel/plugin-transform-modules-umd": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz", + "integrity": "sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==", "dependencies": { - "@babel/helper-module-transforms": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-module-transforms": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1288,10 +1444,12 @@ } }, 
"node_modules/@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.17.10", - "license": "MIT", + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.19.1.tgz", + "integrity": "sha512-oWk9l9WItWBQYS4FgXD4Uyy5kq898lvkXpXQxoJEY1RnvPk4R/Dvu2ebXU9q8lP+rlMwUQTFf2Ok6d78ODa0kw==", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.17.0" + "@babel/helper-create-regexp-features-plugin": "^7.19.0", + "@babel/helper-plugin-utils": "^7.19.0" }, "engines": { "node": ">=6.9.0" @@ -1301,10 +1459,11 @@ } }, "node_modules/@babel/plugin-transform-new-target": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz", + "integrity": "sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1314,11 +1473,12 @@ } }, "node_modules/@babel/plugin-transform-object-super": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz", + "integrity": "sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-replace-supers": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6", + "@babel/helper-replace-supers": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1328,10 +1488,11 @@ } }, "node_modules/@babel/plugin-transform-parameters": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz", + "integrity": "sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1341,10 +1502,11 @@ } }, "node_modules/@babel/plugin-transform-property-literals": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz", + "integrity": "sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1354,10 +1516,11 @@ } }, "node_modules/@babel/plugin-transform-react-constant-elements": { - "version": "7.17.6", - "license": "MIT", + "version": "7.18.12", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.18.12.tgz", + "integrity": "sha512-Q99U9/ttiu+LMnRU8psd23HhvwXmKWDQIpocm0JKaICcZHnw+mdQbHm6xnSy7dOl8I5PELakYtNBubNQlBXbZw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" }, "engines": { "node": ">=6.9.0" @@ -1367,10 +1530,11 @@ } }, "node_modules/@babel/plugin-transform-react-display-name": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz", + "integrity": "sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1380,14 +1544,15 @@ } }, "node_modules/@babel/plugin-transform-react-jsx": { - "version": "7.17.3", - "license": "MIT", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.19.0.tgz", + "integrity": "sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-module-imports": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/plugin-syntax-jsx": "^7.16.7", - "@babel/types": "^7.17.0" + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/plugin-syntax-jsx": "^7.18.6", + "@babel/types": "^7.19.0" }, "engines": { "node": ">=6.9.0" @@ -1397,10 +1562,11 @@ } }, "node_modules/@babel/plugin-transform-react-jsx-development": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz", + "integrity": "sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==", "dependencies": { - "@babel/plugin-transform-react-jsx": "^7.16.7" + "@babel/plugin-transform-react-jsx": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1410,11 +1576,12 @@ } }, "node_modules/@babel/plugin-transform-react-pure-annotations": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz", + "integrity": "sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1424,9 +1591,11 @@ } }, "node_modules/@babel/plugin-transform-regenerator": { - "version": "7.17.9", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz", + "integrity": "sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ==", "dependencies": { + "@babel/helper-plugin-utils": "^7.18.6", "regenerator-transform": "^0.15.0" }, "engines": { @@ -1437,10 +1606,11 @@ } }, "node_modules/@babel/plugin-transform-reserved-words": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz", + "integrity": "sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1450,14 +1620,15 @@ } }, "node_modules/@babel/plugin-transform-runtime": { - "version": "7.17.10", - 
"license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", - "babel-plugin-polyfill-corejs2": "^0.3.0", - "babel-plugin-polyfill-corejs3": "^0.5.0", - "babel-plugin-polyfill-regenerator": "^0.3.0", + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.19.1.tgz", + "integrity": "sha512-2nJjTUFIzBMP/f/miLxEK9vxwW/KUXsdvN4sR//TmuDhe6yU2h57WmIOE12Gng3MDP/xpjUV/ToZRdcf8Yj4fA==", + "dependencies": { + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-plugin-utils": "^7.19.0", + "babel-plugin-polyfill-corejs2": "^0.3.3", + "babel-plugin-polyfill-corejs3": "^0.6.0", + "babel-plugin-polyfill-regenerator": "^0.4.1", "semver": "^6.3.0" }, "engines": { @@ -1469,16 +1640,18 @@ }, "node_modules/@babel/plugin-transform-runtime/node_modules/semver": { "version": "6.3.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/plugin-transform-shorthand-properties": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz", + "integrity": "sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1488,11 +1661,12 @@ } }, "node_modules/@babel/plugin-transform-spread": { - "version": "7.16.7", - "license": "MIT", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.19.0.tgz", + "integrity": "sha512-RsuMk7j6n+r752EtzyScnWkQyuJdli6LdO5Klv8Yx0OfPVTcQkIUfS8clx5e9yHXzlnhOZF3CbQ8C2uP5j074w==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0" + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.18.9" }, "engines": { "node": ">=6.9.0" @@ -1502,10 +1676,11 @@ } }, "node_modules/@babel/plugin-transform-sticky-regex": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz", + "integrity": "sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1515,10 +1690,11 @@ } }, "node_modules/@babel/plugin-transform-template-literals": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz", + "integrity": "sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" }, "engines": { "node": ">=6.9.0" @@ -1528,10 +1704,11 @@ } }, "node_modules/@babel/plugin-transform-typeof-symbol": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.9", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz", + "integrity": "sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" }, "engines": { "node": ">=6.9.0" @@ -1541,12 +1718,13 @@ } }, "node_modules/@babel/plugin-transform-typescript": { - "version": "7.16.8", - "license": "MIT", + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.19.3.tgz", + "integrity": "sha512-z6fnuK9ve9u/0X0rRvI9MY0xg+DOUaABDYOe+/SQTxtlptaBB/V9JIUxJn6xp3lMBeb9qe8xSFmHU35oZDXD+w==", "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/plugin-syntax-typescript": "^7.16.7" + "@babel/helper-create-class-features-plugin": "^7.19.0", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/plugin-syntax-typescript": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1556,10 +1734,11 @@ } }, "node_modules/@babel/plugin-transform-unicode-escapes": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.10", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz", + "integrity": "sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" }, "engines": { "node": ">=6.9.0" @@ -1569,11 +1748,12 @@ } }, "node_modules/@babel/plugin-transform-unicode-regex": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz", + "integrity": "sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1583,35 +1763,37 @@ } }, "node_modules/@babel/preset-env": { - "version": "7.17.10", - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.17.10", - "@babel/helper-compilation-targets": "^7.17.10", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-validator-option": "^7.16.7", - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.16.7", - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.16.7", - "@babel/plugin-proposal-async-generator-functions": "^7.16.8", - "@babel/plugin-proposal-class-properties": "^7.16.7", - "@babel/plugin-proposal-class-static-block": "^7.17.6", - "@babel/plugin-proposal-dynamic-import": "^7.16.7", - "@babel/plugin-proposal-export-namespace-from": "^7.16.7", - "@babel/plugin-proposal-json-strings": "^7.16.7", - "@babel/plugin-proposal-logical-assignment-operators": "^7.16.7", - "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7", - "@babel/plugin-proposal-numeric-separator": "^7.16.7", - "@babel/plugin-proposal-object-rest-spread": "^7.17.3", - "@babel/plugin-proposal-optional-catch-binding": "^7.16.7", - "@babel/plugin-proposal-optional-chaining": "^7.16.7", - "@babel/plugin-proposal-private-methods": "^7.16.11", - 
"@babel/plugin-proposal-private-property-in-object": "^7.16.7", - "@babel/plugin-proposal-unicode-property-regex": "^7.16.7", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.19.4.tgz", + "integrity": "sha512-5QVOTXUdqTCjQuh2GGtdd7YEhoRXBMVGROAtsBeLGIbIz3obCBIfRMT1I3ZKkMgNzwkyCkftDXSSkHxnfVf4qg==", + "dependencies": { + "@babel/compat-data": "^7.19.4", + "@babel/helper-compilation-targets": "^7.19.3", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/helper-validator-option": "^7.18.6", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.18.6", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.18.9", + "@babel/plugin-proposal-async-generator-functions": "^7.19.1", + "@babel/plugin-proposal-class-properties": "^7.18.6", + "@babel/plugin-proposal-class-static-block": "^7.18.6", + "@babel/plugin-proposal-dynamic-import": "^7.18.6", + "@babel/plugin-proposal-export-namespace-from": "^7.18.9", + "@babel/plugin-proposal-json-strings": "^7.18.6", + "@babel/plugin-proposal-logical-assignment-operators": "^7.18.9", + "@babel/plugin-proposal-nullish-coalescing-operator": "^7.18.6", + "@babel/plugin-proposal-numeric-separator": "^7.18.6", + "@babel/plugin-proposal-object-rest-spread": "^7.19.4", + "@babel/plugin-proposal-optional-catch-binding": "^7.18.6", + "@babel/plugin-proposal-optional-chaining": "^7.18.9", + "@babel/plugin-proposal-private-methods": "^7.18.6", + "@babel/plugin-proposal-private-property-in-object": "^7.18.6", + "@babel/plugin-proposal-unicode-property-regex": "^7.18.6", "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-class-properties": "^7.12.13", "@babel/plugin-syntax-class-static-block": "^7.14.5", "@babel/plugin-syntax-dynamic-import": "^7.8.3", "@babel/plugin-syntax-export-namespace-from": "^7.8.3", + "@babel/plugin-syntax-import-assertions": "^7.18.6", "@babel/plugin-syntax-json-strings": "^7.8.3", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", @@ -1621,44 +1803,44 @@ "@babel/plugin-syntax-optional-chaining": "^7.8.3", "@babel/plugin-syntax-private-property-in-object": "^7.14.5", "@babel/plugin-syntax-top-level-await": "^7.14.5", - "@babel/plugin-transform-arrow-functions": "^7.16.7", - "@babel/plugin-transform-async-to-generator": "^7.16.8", - "@babel/plugin-transform-block-scoped-functions": "^7.16.7", - "@babel/plugin-transform-block-scoping": "^7.16.7", - "@babel/plugin-transform-classes": "^7.16.7", - "@babel/plugin-transform-computed-properties": "^7.16.7", - "@babel/plugin-transform-destructuring": "^7.17.7", - "@babel/plugin-transform-dotall-regex": "^7.16.7", - "@babel/plugin-transform-duplicate-keys": "^7.16.7", - "@babel/plugin-transform-exponentiation-operator": "^7.16.7", - "@babel/plugin-transform-for-of": "^7.16.7", - "@babel/plugin-transform-function-name": "^7.16.7", - "@babel/plugin-transform-literals": "^7.16.7", - "@babel/plugin-transform-member-expression-literals": "^7.16.7", - "@babel/plugin-transform-modules-amd": "^7.16.7", - "@babel/plugin-transform-modules-commonjs": "^7.17.9", - "@babel/plugin-transform-modules-systemjs": "^7.17.8", - "@babel/plugin-transform-modules-umd": "^7.16.7", - "@babel/plugin-transform-named-capturing-groups-regex": "^7.17.10", - "@babel/plugin-transform-new-target": "^7.16.7", - "@babel/plugin-transform-object-super": "^7.16.7", - "@babel/plugin-transform-parameters": "^7.16.7", - 
"@babel/plugin-transform-property-literals": "^7.16.7", - "@babel/plugin-transform-regenerator": "^7.17.9", - "@babel/plugin-transform-reserved-words": "^7.16.7", - "@babel/plugin-transform-shorthand-properties": "^7.16.7", - "@babel/plugin-transform-spread": "^7.16.7", - "@babel/plugin-transform-sticky-regex": "^7.16.7", - "@babel/plugin-transform-template-literals": "^7.16.7", - "@babel/plugin-transform-typeof-symbol": "^7.16.7", - "@babel/plugin-transform-unicode-escapes": "^7.16.7", - "@babel/plugin-transform-unicode-regex": "^7.16.7", + "@babel/plugin-transform-arrow-functions": "^7.18.6", + "@babel/plugin-transform-async-to-generator": "^7.18.6", + "@babel/plugin-transform-block-scoped-functions": "^7.18.6", + "@babel/plugin-transform-block-scoping": "^7.19.4", + "@babel/plugin-transform-classes": "^7.19.0", + "@babel/plugin-transform-computed-properties": "^7.18.9", + "@babel/plugin-transform-destructuring": "^7.19.4", + "@babel/plugin-transform-dotall-regex": "^7.18.6", + "@babel/plugin-transform-duplicate-keys": "^7.18.9", + "@babel/plugin-transform-exponentiation-operator": "^7.18.6", + "@babel/plugin-transform-for-of": "^7.18.8", + "@babel/plugin-transform-function-name": "^7.18.9", + "@babel/plugin-transform-literals": "^7.18.9", + "@babel/plugin-transform-member-expression-literals": "^7.18.6", + "@babel/plugin-transform-modules-amd": "^7.18.6", + "@babel/plugin-transform-modules-commonjs": "^7.18.6", + "@babel/plugin-transform-modules-systemjs": "^7.19.0", + "@babel/plugin-transform-modules-umd": "^7.18.6", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.19.1", + "@babel/plugin-transform-new-target": "^7.18.6", + "@babel/plugin-transform-object-super": "^7.18.6", + "@babel/plugin-transform-parameters": "^7.18.8", + "@babel/plugin-transform-property-literals": "^7.18.6", + "@babel/plugin-transform-regenerator": "^7.18.6", + "@babel/plugin-transform-reserved-words": "^7.18.6", + "@babel/plugin-transform-shorthand-properties": "^7.18.6", + "@babel/plugin-transform-spread": "^7.19.0", + "@babel/plugin-transform-sticky-regex": "^7.18.6", + "@babel/plugin-transform-template-literals": "^7.18.9", + "@babel/plugin-transform-typeof-symbol": "^7.18.9", + "@babel/plugin-transform-unicode-escapes": "^7.18.10", + "@babel/plugin-transform-unicode-regex": "^7.18.6", "@babel/preset-modules": "^0.1.5", - "@babel/types": "^7.17.10", - "babel-plugin-polyfill-corejs2": "^0.3.0", - "babel-plugin-polyfill-corejs3": "^0.5.0", - "babel-plugin-polyfill-regenerator": "^0.3.0", - "core-js-compat": "^3.22.1", + "@babel/types": "^7.19.4", + "babel-plugin-polyfill-corejs2": "^0.3.3", + "babel-plugin-polyfill-corejs3": "^0.6.0", + "babel-plugin-polyfill-regenerator": "^0.4.1", + "core-js-compat": "^3.25.1", "semver": "^6.3.0" }, "engines": { @@ -1670,14 +1852,16 @@ }, "node_modules/@babel/preset-env/node_modules/semver": { "version": "6.3.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/preset-modules": { "version": "0.1.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz", + "integrity": "sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==", "dependencies": { "@babel/helper-plugin-utils": "^7.0.0", "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", @@ -1690,15 
+1874,16 @@ } }, "node_modules/@babel/preset-react": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.18.6.tgz", + "integrity": "sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-validator-option": "^7.16.7", - "@babel/plugin-transform-react-display-name": "^7.16.7", - "@babel/plugin-transform-react-jsx": "^7.16.7", - "@babel/plugin-transform-react-jsx-development": "^7.16.7", - "@babel/plugin-transform-react-pure-annotations": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6", + "@babel/helper-validator-option": "^7.18.6", + "@babel/plugin-transform-react-display-name": "^7.18.6", + "@babel/plugin-transform-react-jsx": "^7.18.6", + "@babel/plugin-transform-react-jsx-development": "^7.18.6", + "@babel/plugin-transform-react-pure-annotations": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1708,12 +1893,13 @@ } }, "node_modules/@babel/preset-typescript": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.18.6.tgz", + "integrity": "sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-validator-option": "^7.16.7", - "@babel/plugin-transform-typescript": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6", + "@babel/helper-validator-option": "^7.18.6", + "@babel/plugin-transform-typescript": "^7.18.6" }, "engines": { "node": ">=6.9.0" @@ -1723,8 +1909,9 @@ } }, "node_modules/@babel/runtime": { - "version": "7.17.9", - "license": "MIT", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.19.4.tgz", + "integrity": "sha512-EXpLCrk55f+cYqmHsSR+yD/0gAIMxxA9QK9lnQWzhMCvt+YmoBN7Zx94s++Kv0+unHk39vxNO8t+CMA2WSS3wA==", "dependencies": { "regenerator-runtime": "^0.13.4" }, @@ -1733,10 +1920,11 @@ } }, "node_modules/@babel/runtime-corejs3": { - "version": "7.17.9", - "license": "MIT", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.19.4.tgz", + "integrity": "sha512-HzjQ8+dzdx7dmZy4DQ8KV8aHi/74AjEbBGTFutBmg/pd3dY5/q1sfuOGPTFGEytlQhWoeVXqcK5BwMgIkRkNDQ==", "dependencies": { - "core-js-pure": "^3.20.2", + "core-js-pure": "^3.25.1", "regenerator-runtime": "^0.13.4" }, "engines": { @@ -1744,29 +1932,31 @@ } }, "node_modules/@babel/template": { - "version": "7.16.7", - "license": "MIT", + "version": "7.18.10", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.18.10.tgz", + "integrity": "sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==", "dependencies": { - "@babel/code-frame": "^7.16.7", - "@babel/parser": "^7.16.7", - "@babel/types": "^7.16.7" + "@babel/code-frame": "^7.18.6", + "@babel/parser": "^7.18.10", + "@babel/types": "^7.18.10" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.17.10", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.16.7", - "@babel/generator": "^7.17.10", - "@babel/helper-environment-visitor": "^7.16.7", - "@babel/helper-function-name": "^7.17.9", - "@babel/helper-hoist-variables": "^7.16.7", - "@babel/helper-split-export-declaration": "^7.16.7", - "@babel/parser": "^7.17.10", - "@babel/types": "^7.17.10", + 
"version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.19.4.tgz", + "integrity": "sha512-w3K1i+V5u2aJUOXBFFC5pveFLmtq1s3qcdDNC2qRI6WPBQIDaKFqXxDEqDO/h1dQ3HjsZoZMyIy6jGLq0xtw+g==", + "dependencies": { + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.19.4", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-function-name": "^7.19.0", + "@babel/helper-hoist-variables": "^7.18.6", + "@babel/helper-split-export-declaration": "^7.18.6", + "@babel/parser": "^7.19.4", + "@babel/types": "^7.19.4", "debug": "^4.1.0", "globals": "^11.1.0" }, @@ -1775,10 +1965,12 @@ } }, "node_modules/@babel/types": { - "version": "7.17.10", - "license": "MIT", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.19.4.tgz", + "integrity": "sha512-M5LK7nAeS6+9j7hAq+b3fQs+pNfUtTGq+yFFfHnauFA8zQtLRfmuipmsKDKKLuyG+wC8ABW43A153YNawNTEtw==", "dependencies": { - "@babel/helper-validator-identifier": "^7.16.7", + "@babel/helper-string-parser": "^7.19.4", + "@babel/helper-validator-identifier": "^7.19.1", "to-fast-properties": "^2.0.0" }, "engines": { @@ -1787,39 +1979,117 @@ }, "node_modules/@bcoe/v8-coverage": { "version": "0.2.3", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true }, "node_modules/@colors/colors": { "version": "1.5.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", + "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", "optional": true, "engines": { "node": ">=0.1.90" } }, + "node_modules/@cypress/request": { + "version": "2.88.10", + "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.10.tgz", + "integrity": "sha512-Zp7F+R93N0yZyG34GutyTNr+okam7s/Fzc1+i3kcqOP8vk6OuajuE9qZJ6Rs+10/1JFtXFYMdyarnU1rZuJesg==", + "dev": true, + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "http-signature": "~1.3.6", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^8.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@cypress/request/node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/@cypress/xvfb": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", + "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", + "dev": true, + "dependencies": { + "debug": "^3.1.0", + "lodash.once": "^4.1.1" + } + }, + "node_modules/@cypress/xvfb/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": 
"sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, "node_modules/@docsearch/css": { - "version": "3.0.0", - "license": "MIT" + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.2.1.tgz", + "integrity": "sha512-gaP6TxxwQC+K8D6TRx5WULUWKrcbzECOPA2KCVMuI+6C7dNiGUk5yXXzVhc5sld79XKYLnO9DRTI4mjXDYkh+g==" }, "node_modules/@docsearch/react": { - "version": "3.0.0", - "license": "MIT", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.2.1.tgz", + "integrity": "sha512-EzTQ/y82s14IQC5XVestiK/kFFMe2aagoYFuTAIfIb/e+4FU7kSMKonRtLwsCiLQHmjvNQq+HO+33giJ5YVtaQ==", "dependencies": { - "@algolia/autocomplete-core": "1.5.2", - "@algolia/autocomplete-preset-algolia": "1.5.2", - "@docsearch/css": "3.0.0", + "@algolia/autocomplete-core": "1.7.1", + "@algolia/autocomplete-preset-algolia": "1.7.1", + "@docsearch/css": "3.2.1", "algoliasearch": "^4.0.0" }, "peerDependencies": { - "@types/react": ">= 16.8.0 < 18.0.0", - "react": ">= 16.8.0 < 18.0.0", - "react-dom": ">= 16.8.0 < 18.0.0" + "@types/react": ">= 16.8.0 < 19.0.0", + "react": ">= 16.8.0 < 19.0.0", + "react-dom": ">= 16.8.0 < 19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } } }, "node_modules/@docusaurus/core": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-2.0.0-beta.17.tgz", + "integrity": "sha512-iNdW7CsmHNOgc4PxD9BFxa+MD8+i7ln7erOBkF3FSMMPnsKUeVqsR3rr31aLmLZRlTXMITSPLxlXwtBZa3KPCw==", "dependencies": { "@babel/core": "^7.17.5", "@babel/generator": "^7.17.3", @@ -1906,408 +2176,140 @@ "react-dom": "^16.8.4 || ^17.0.0" } }, - "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-add-jsx-attribute": { - "version": "6.0.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" + "node_modules/@docusaurus/core/node_modules/css-loader": { + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.7.1.tgz", + "integrity": "sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw==", + "dependencies": { + "icss-utils": "^5.1.0", + "postcss": "^8.4.7", + "postcss-modules-extract-imports": "^3.0.0", + "postcss-modules-local-by-default": "^4.0.0", + "postcss-modules-scope": "^3.0.0", + "postcss-modules-values": "^4.0.0", + "postcss-value-parser": "^4.2.0", + "semver": "^7.3.5" }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-remove-jsx-attribute": { - "version": "6.0.0", - "license": "MIT", "engines": { - "node": ">=10" + "node": ">= 12.13.0" }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" + "type": "opencollective", + "url": "https://opencollective.com/webpack" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "webpack": "^5.0.0" } }, - "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-remove-jsx-empty-expression": { - "version": "6.0.0", - "license": "MIT", + "node_modules/@docusaurus/core/node_modules/icss-utils": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz", + "integrity": 
"sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==", "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" + "node": "^10 || ^12 || >= 14" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "postcss": "^8.1.0" } }, - "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-replace-jsx-attribute-value": { - "version": "6.0.0", - "license": "MIT", + "node_modules/@docusaurus/core/node_modules/postcss-modules-extract-imports": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz", + "integrity": "sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw==", "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" + "node": "^10 || ^12 || >= 14" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "postcss": "^8.1.0" } }, - "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-svg-dynamic-title": { - "version": "6.0.0", - "license": "MIT", - "engines": { - "node": ">=10" + "node_modules/@docusaurus/core/node_modules/postcss-modules-local-by-default": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz", + "integrity": "sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ==", + "dependencies": { + "icss-utils": "^5.0.0", + "postcss-selector-parser": "^6.0.2", + "postcss-value-parser": "^4.1.0" }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" + "engines": { + "node": "^10 || ^12 || >= 14" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "postcss": "^8.1.0" } }, - "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-svg-em-dimensions": { - "version": "6.0.0", - "license": "MIT", - "engines": { - "node": ">=10" + "node_modules/@docusaurus/core/node_modules/postcss-modules-scope": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz", + "integrity": "sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg==", + "dependencies": { + "postcss-selector-parser": "^6.0.4" }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" + "engines": { + "node": "^10 || ^12 || >= 14" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "postcss": "^8.1.0" } }, - "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-transform-react-native-svg": { - "version": "6.0.0", - "license": "MIT", - "engines": { - "node": ">=10" + "node_modules/@docusaurus/core/node_modules/postcss-modules-values": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz", + "integrity": "sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==", + "dependencies": { + "icss-utils": "^5.0.0" }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" + "engines": { + "node": "^10 || ^12 || >= 14" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "postcss": "^8.1.0" } }, - "node_modules/@docusaurus/core/node_modules/@svgr/babel-plugin-transform-svg-component": { - "version": "6.2.0", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "type": 
"github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node_modules/@docusaurus/cssnano-preset": { + "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-2.0.0-beta.17.tgz", + "integrity": "sha512-DoBwtLjJ9IY9/lNMHIEdo90L4NDayvU28nLgtjR2Sc6aBIMEB/3a5Ndjehnp+jZAkwcDdNASA86EkZVUyz1O1A==", + "dependencies": { + "cssnano-preset-advanced": "^5.1.12", + "postcss": "^8.4.7", + "postcss-sort-media-queries": "^4.2.1" } }, - "node_modules/@docusaurus/core/node_modules/@svgr/babel-preset": { - "version": "6.2.0", - "license": "MIT", + "node_modules/@docusaurus/logger": { + "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-2.0.0-beta.17.tgz", + "integrity": "sha512-F9JDl06/VLg+ylsvnq9NpILSUeWtl0j4H2LtlLzX5gufEL4dGiCMlnUzYdHl7FSHSzYJ0A/R7vu0SYofsexC4w==", "dependencies": { - "@svgr/babel-plugin-add-jsx-attribute": "^6.0.0", - "@svgr/babel-plugin-remove-jsx-attribute": "^6.0.0", - "@svgr/babel-plugin-remove-jsx-empty-expression": "^6.0.0", - "@svgr/babel-plugin-replace-jsx-attribute-value": "^6.0.0", - "@svgr/babel-plugin-svg-dynamic-title": "^6.0.0", - "@svgr/babel-plugin-svg-em-dimensions": "^6.0.0", - "@svgr/babel-plugin-transform-react-native-svg": "^6.0.0", - "@svgr/babel-plugin-transform-svg-component": "^6.2.0" + "chalk": "^4.1.2", + "tslib": "^2.3.1" }, "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": ">=14" } }, - "node_modules/@docusaurus/core/node_modules/@svgr/core": { - "version": "6.2.1", - "license": "MIT", + "node_modules/@docusaurus/logger/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dependencies": { - "@svgr/plugin-jsx": "^6.2.1", - "camelcase": "^6.2.0", - "cosmiconfig": "^7.0.1" + "color-convert": "^2.0.1" }, "engines": { - "node": ">=10" + "node": ">=8" }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/@docusaurus/core/node_modules/@svgr/hast-util-to-babel-ast": { - "version": "6.2.1", - "license": "MIT", + "node_modules/@docusaurus/logger/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dependencies": { - "@babel/types": "^7.15.6", - "entities": "^3.0.1" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - } - }, - "node_modules/@docusaurus/core/node_modules/@svgr/plugin-jsx": { - "version": "6.2.1", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.15.5", - "@svgr/babel-preset": "^6.2.0", - "@svgr/hast-util-to-babel-ast": "^6.2.1", - "svg-parser": "^2.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@svgr/core": "^6.0.0" - } - }, - "node_modules/@docusaurus/core/node_modules/@svgr/plugin-svgo": { - "version": "6.2.0", - "license": "MIT", - "dependencies": { - "cosmiconfig": "^7.0.1", - 
"deepmerge": "^4.2.2", - "svgo": "^2.5.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@svgr/core": "^6.0.0" - } - }, - "node_modules/@docusaurus/core/node_modules/@svgr/webpack": { - "version": "6.2.1", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.15.5", - "@babel/plugin-transform-react-constant-elements": "^7.14.5", - "@babel/preset-env": "^7.15.6", - "@babel/preset-react": "^7.14.5", - "@babel/preset-typescript": "^7.15.0", - "@svgr/core": "^6.2.1", - "@svgr/plugin-jsx": "^6.2.1", - "@svgr/plugin-svgo": "^6.2.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - } - }, - "node_modules/@docusaurus/core/node_modules/css-loader": { - "version": "6.7.1", - "license": "MIT", - "dependencies": { - "icss-utils": "^5.1.0", - "postcss": "^8.4.7", - "postcss-modules-extract-imports": "^3.0.0", - "postcss-modules-local-by-default": "^4.0.0", - "postcss-modules-scope": "^3.0.0", - "postcss-modules-values": "^4.0.0", - "postcss-value-parser": "^4.2.0", - "semver": "^7.3.5" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.0.0" - } - }, - "node_modules/@docusaurus/core/node_modules/css-tree": { - "version": "1.1.3", - "license": "MIT", - "dependencies": { - "mdn-data": "2.0.14", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/@docusaurus/core/node_modules/entities": { - "version": "3.0.1", - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/@docusaurus/core/node_modules/icss-utils": { - "version": "5.1.0", - "license": "ISC", - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/@docusaurus/core/node_modules/mdn-data": { - "version": "2.0.14", - "license": "CC0-1.0" - }, - "node_modules/@docusaurus/core/node_modules/postcss-modules-extract-imports": { - "version": "3.0.0", - "license": "ISC", - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/@docusaurus/core/node_modules/postcss-modules-local-by-default": { - "version": "4.0.0", - "license": "MIT", - "dependencies": { - "icss-utils": "^5.0.0", - "postcss-selector-parser": "^6.0.2", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/@docusaurus/core/node_modules/postcss-modules-scope": { - "version": "3.0.0", - "license": "ISC", - "dependencies": { - "postcss-selector-parser": "^6.0.4" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/@docusaurus/core/node_modules/postcss-modules-values": { - "version": "4.0.0", - "license": "ISC", - "dependencies": { - "icss-utils": "^5.0.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/@docusaurus/core/node_modules/svgo": { - "version": "2.8.0", - "license": "MIT", - "dependencies": { - "@trysound/sax": "0.2.0", - "commander": "^7.2.0", - "css-select": "^4.1.3", - "css-tree": "^1.1.3", - "csso": "^4.2.0", - "picocolors": 
"^1.0.0", - "stable": "^0.1.8" - }, - "bin": { - "svgo": "bin/svgo" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/@docusaurus/core/node_modules/svgo/node_modules/commander": { - "version": "7.2.0", - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, - "node_modules/@docusaurus/cssnano-preset": { - "version": "2.0.0-beta.17", - "license": "MIT", - "dependencies": { - "cssnano-preset-advanced": "^5.1.12", - "postcss": "^8.4.7", - "postcss-sort-media-queries": "^4.2.1" - } - }, - "node_modules/@docusaurus/logger": { - "version": "2.0.0-beta.17", - "license": "MIT", - "dependencies": { - "chalk": "^4.1.2", - "tslib": "^2.3.1" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/@docusaurus/logger/node_modules/ansi-styles": { - "version": "4.3.0", - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/@docusaurus/logger/node_modules/chalk": { - "version": "4.1.2", - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" }, "engines": { "node": ">=10" @@ -2318,7 +2320,8 @@ }, "node_modules/@docusaurus/logger/node_modules/color-convert": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { "color-name": "~1.1.4" }, @@ -2328,18 +2331,21 @@ }, "node_modules/@docusaurus/logger/node_modules/color-name": { "version": "1.1.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/@docusaurus/logger/node_modules/has-flag": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "engines": { "node": ">=8" } }, "node_modules/@docusaurus/logger/node_modules/supports-color": { "version": "7.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dependencies": { "has-flag": "^4.0.0" }, @@ -2349,7 +2355,8 @@ }, "node_modules/@docusaurus/lqip-loader": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/lqip-loader/-/lqip-loader-2.0.0-beta.17.tgz", + "integrity": "sha512-pqwVuSYOthQgVVUmc8lI36hHzewKbBZBjLgxl3Khd2xL1gShZcb8o0Zx0IXSvqskZaq3E3TXRuzBAjyvIRaKdw==", "dependencies": { "@docusaurus/logger": "2.0.0-beta.17", "file-loader": "^6.2.0", @@ -2364,7 +2371,8 @@ }, "node_modules/@docusaurus/mdx-loader": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-2.0.0-beta.17.tgz", + "integrity": "sha512-AhJ3GWRmjQYCyINHE595pff5tn3Rt83oGpdev5UT9uvG9lPYPC8nEmh1LI6c0ogfw7YkNznzxWSW4hyyVbYQ3A==", "dependencies": { "@babel/parser": "^7.17.3", "@babel/traverse": "^7.17.3", @@ -2393,7 +2401,8 @@ }, "node_modules/@docusaurus/module-type-aliases": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-2.0.0-beta.17.tgz", + "integrity": "sha512-Tu+8geC/wyygBudbSwvWIHEvt5RwyA7dEoE1JmPbgQtmqUxOZ9bgnfemwXpJW5mKuDiJASbN4of1DhbLqf4sPg==", "dependencies": { "@docusaurus/types": "2.0.0-beta.17", "@types/react": "*", @@ -2408,7 +2417,8 @@ }, "node_modules/@docusaurus/plugin-content-blog": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.0.0-beta.17.tgz", + "integrity": "sha512-gcX4UR+WKT4bhF8FICBQHy+ESS9iRMeaglSboTZbA/YHGax/3EuZtcPU3dU4E/HFJeZ866wgUdbLKpIpsZOidg==", "dependencies": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/logger": "2.0.0-beta.17", @@ -2436,7 +2446,8 @@ }, "node_modules/@docusaurus/plugin-content-docs": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.0.0-beta.17.tgz", + "integrity": "sha512-YYrBpuRfTfE6NtENrpSHTJ7K7PZifn6j6hcuvdC0QKE+WD8pS+O2/Ws30yoyvHwLnAnfhvaderh1v9Kaa0/ANg==", "dependencies": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/logger": "2.0.0-beta.17", @@ -2463,7 +2474,8 @@ }, "node_modules/@docusaurus/plugin-content-pages": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.0.0-beta.17.tgz", + "integrity": "sha512-d5x0mXTMJ44ojRQccmLyshYoamFOep2AnBe69osCDnwWMbD3Or3pnc2KMK9N7mVpQFnNFKbHNCLrX3Rv0uwEHA==", "dependencies": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/mdx-loader": "2.0.0-beta.17", @@ -2484,7 +2496,8 @@ }, "node_modules/@docusaurus/plugin-debug": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-2.0.0-beta.17.tgz", + "integrity": "sha512-p26fjYFRSC0esEmKo/kRrLVwXoFnzPCFDumwrImhPyqfVxbj+IKFaiXkayb2qHnyEGE/1KSDIgRF4CHt/pyhiw==", "dependencies": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/utils": "2.0.0-beta.17", @@ -2502,7 +2515,8 @@ }, "node_modules/@docusaurus/plugin-google-analytics": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.0.0-beta.17.tgz", + "integrity": "sha512-jvgYIhggYD1W2jymqQVAAyjPJUV1xMCn70bAzaCMxriureMWzhQ/kQMVQpop0ijTMvifOxaV9yTcL1VRXev++A==", "dependencies": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/utils-validation": "2.0.0-beta.17", @@ -2518,7 +2532,8 @@ }, "node_modules/@docusaurus/plugin-google-gtag": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.0.0-beta.17.tgz", + "integrity": "sha512-1pnWHtIk1Jfeqwvr8PlcPE5SODWT1gW4TI+ptmJbJ296FjjyvL/pG0AcGEJmYLY/OQc3oz0VQ0W2ognw9jmFIw==", "dependencies": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/utils-validation": "2.0.0-beta.17", @@ -2534,7 +2549,8 @@ }, "node_modules/@docusaurus/plugin-ideal-image": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-ideal-image/-/plugin-ideal-image-2.0.0-beta.17.tgz", + "integrity": "sha512-9ZUi3cVnQsOCW4NSDyQRbH4UmlLqclKjQNQv+O/BKwO5EkjLeB4Q2TeHaXkivntrY4nARGjRcGBHKb3L9zzJXA==", "dependencies": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/lqip-loader": "2.0.0-beta.17", @@ -2563,7 +2579,8 @@ }, "node_modules/@docusaurus/plugin-sitemap": { "version": "2.0.0-beta.17", - "license": 
"MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.0.0-beta.17.tgz", + "integrity": "sha512-19/PaGCsap6cjUPZPGs87yV9e1hAIyd0CTSeVV6Caega8nmOKk20FTrQGFJjZPeX8jvD9QIXcdg6BJnPxcKkaQ==", "dependencies": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/utils": "2.0.0-beta.17", @@ -2583,7 +2600,8 @@ }, "node_modules/@docusaurus/preset-classic": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-2.0.0-beta.17.tgz", + "integrity": "sha512-7YUxPEgM09aZWr25/hpDEp1gPl+1KsCPV1ZTRW43sbQ9TinPm+9AKR3rHVDa8ea8MdiS7BpqCVyK+H/eiyQrUw==", "dependencies": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/plugin-content-blog": "2.0.0-beta.17", @@ -2607,7 +2625,8 @@ }, "node_modules/@docusaurus/react-loadable": { "version": "5.5.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz", + "integrity": "sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==", "dependencies": { "@types/react": "*", "prop-types": "^15.6.2" @@ -2618,7 +2637,8 @@ }, "node_modules/@docusaurus/responsive-loader": { "version": "1.7.0", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/@docusaurus/responsive-loader/-/responsive-loader-1.7.0.tgz", + "integrity": "sha512-N0cWuVqTRXRvkBxeMQcy/OF2l7GN8rmni5EzR3HpwR+iU2ckYPnziceojcxvvxQ5NqZg1QfEW0tycQgHp+e+Nw==", "dependencies": { "loader-utils": "^2.0.0" }, @@ -2640,7 +2660,8 @@ }, "node_modules/@docusaurus/theme-classic": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-2.0.0-beta.17.tgz", + "integrity": "sha512-xfZ9kpgqo0lP9YO4rJj79wtiQJXU6ARo5wYy10IIwiWN+lg00scJHhkmNV431b05xIUjUr0cKeH9nqZmEsQRKg==", "dependencies": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/plugin-content-blog": "2.0.0-beta.17", @@ -2672,7 +2693,8 @@ }, "node_modules/@docusaurus/theme-common": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-2.0.0-beta.17.tgz", + "integrity": "sha512-LJBDhx+Qexn1JHBqZbE4k+7lBaV1LgpE33enXf43ShB7ebhC91d5HLHhBwgt0pih4+elZU4rG+BG/roAmsNM0g==", "dependencies": { "@docusaurus/module-type-aliases": "2.0.0-beta.17", "@docusaurus/plugin-content-blog": "2.0.0-beta.17", @@ -2694,7 +2716,8 @@ }, "node_modules/@docusaurus/theme-search-algolia": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.0.0-beta.17.tgz", + "integrity": "sha512-W12XKM7QC5Jmrec359bJ7aDp5U8DNkCxjVKsMNIs8rDunBoI/N+R35ERJ0N7Bg9ONAWO6o7VkUERQsfGqdvr9w==", "dependencies": { "@docsearch/react": "^3.0.0", "@docusaurus/core": "2.0.0-beta.17", @@ -2722,7 +2745,8 @@ }, "node_modules/@docusaurus/theme-translations": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-2.0.0-beta.17.tgz", + "integrity": "sha512-oxCX6khjZH3lgdRCL0DH06KkUM/kDr9+lzB35+vY8rpFeQruVgRdi8ekPqG3+Wr0U/N+LMhcYE5BmCb6D0Fv2A==", "dependencies": { "fs-extra": "^10.0.1", "tslib": "^2.3.1" @@ -2733,7 +2757,8 @@ }, "node_modules/@docusaurus/types": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-2.0.0-beta.17.tgz", + "integrity": 
"sha512-4o7TXu5sKlQpybfFFtsGUElBXwSpiXKsQyyWaRKj7DRBkvMtkDX6ITZNnZO9+EHfLbP/cfrokB8C/oO7mCQ5BQ==", "dependencies": { "commander": "^5.1.0", "joi": "^17.6.0", @@ -2745,7 +2770,8 @@ }, "node_modules/@docusaurus/utils": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-2.0.0-beta.17.tgz", + "integrity": "sha512-yRKGdzSc5v6M/6GyQ4omkrAHCleevwKYiIrufCJgRbOtkhYE574d8mIjjirOuA/emcyLxjh+TLtqAA5TwhIryA==", "dependencies": { "@docusaurus/logger": "2.0.0-beta.17", "@svgr/webpack": "^6.0.0", @@ -2769,7 +2795,8 @@ }, "node_modules/@docusaurus/utils-common": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-2.0.0-beta.17.tgz", + "integrity": "sha512-90WCVdj6zYzs7neEIS594qfLO78cUL6EVK1CsRHJgVkkGjcYlCQ1NwkyO7bOb+nIAwdJrPJRc2FBSpuEGxPD3w==", "dependencies": { "tslib": "^2.3.1" }, @@ -2779,7 +2806,8 @@ }, "node_modules/@docusaurus/utils-validation": { "version": "2.0.0-beta.17", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-2.0.0-beta.17.tgz", + "integrity": "sha512-5UjayUP16fDjgd52eSEhL7SlN9x60pIhyS+K7kt7RmpSLy42+4/bSr2pns2VlATmuaoNOO6iIFdB2jgSYJ6SGA==", "dependencies": { "@docusaurus/logger": "2.0.0-beta.17", "@docusaurus/utils": "2.0.0-beta.17", @@ -2790,335 +2818,66 @@ "node": ">=14" } }, - "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-add-jsx-attribute": { - "version": "6.0.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node_modules/@emotion/is-prop-valid": { + "version": "0.8.8", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", + "integrity": "sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==", + "dependencies": { + "@emotion/memoize": "0.7.4" } }, - "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-remove-jsx-attribute": { - "version": "6.0.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } + "node_modules/@emotion/memoize": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz", + "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==" }, - "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-remove-jsx-empty-expression": { - "version": "6.0.0", - "license": "MIT", + "node_modules/@emotion/stylis": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/@emotion/stylis/-/stylis-0.8.5.tgz", + "integrity": "sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ==" + }, + "node_modules/@emotion/unitless": { + "version": "0.7.5", + "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz", + "integrity": "sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==" + }, + "node_modules/@endiliey/react-ideal-image": { + "version": "0.0.11", + "resolved": "https://registry.npmjs.org/@endiliey/react-ideal-image/-/react-ideal-image-0.0.11.tgz", + "integrity": 
"sha512-QxMjt/Gvur/gLxSoCy7VIyGGGrGmDN+VHcXkN3R2ApoWX0EYUE+hMgPHSW/PV6VVebZ1Nd4t2UnGRBDihu16JQ==", "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" + "node": ">= 8.9.0", + "npm": "> 3" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-replace-jsx-attribute-value": { - "version": "6.0.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-svg-dynamic-title": { - "version": "6.0.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-svg-em-dimensions": { - "version": "6.0.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-transform-react-native-svg": { - "version": "6.0.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/babel-plugin-transform-svg-component": { - "version": "6.2.0", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/babel-preset": { - "version": "6.2.0", - "license": "MIT", - "dependencies": { - "@svgr/babel-plugin-add-jsx-attribute": "^6.0.0", - "@svgr/babel-plugin-remove-jsx-attribute": "^6.0.0", - "@svgr/babel-plugin-remove-jsx-empty-expression": "^6.0.0", - "@svgr/babel-plugin-replace-jsx-attribute-value": "^6.0.0", - "@svgr/babel-plugin-svg-dynamic-title": "^6.0.0", - "@svgr/babel-plugin-svg-em-dimensions": "^6.0.0", - "@svgr/babel-plugin-transform-react-native-svg": "^6.0.0", - "@svgr/babel-plugin-transform-svg-component": "^6.2.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/core": { - "version": "6.2.1", - "license": "MIT", - "dependencies": { - "@svgr/plugin-jsx": "^6.2.1", - "camelcase": "^6.2.0", - "cosmiconfig": "^7.0.1" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/hast-util-to-babel-ast": { - "version": "6.2.1", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.15.6", - "entities": "^3.0.1" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/plugin-jsx": { - "version": "6.2.1", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.15.5", - 
"@svgr/babel-preset": "^6.2.0", - "@svgr/hast-util-to-babel-ast": "^6.2.1", - "svg-parser": "^2.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@svgr/core": "^6.0.0" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/plugin-svgo": { - "version": "6.2.0", - "license": "MIT", - "dependencies": { - "cosmiconfig": "^7.0.1", - "deepmerge": "^4.2.2", - "svgo": "^2.5.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@svgr/core": "^6.0.0" - } - }, - "node_modules/@docusaurus/utils/node_modules/@svgr/webpack": { - "version": "6.2.1", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.15.5", - "@babel/plugin-transform-react-constant-elements": "^7.14.5", - "@babel/preset-env": "^7.15.6", - "@babel/preset-react": "^7.14.5", - "@babel/preset-typescript": "^7.15.0", - "@svgr/core": "^6.2.1", - "@svgr/plugin-jsx": "^6.2.1", - "@svgr/plugin-svgo": "^6.2.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - } - }, - "node_modules/@docusaurus/utils/node_modules/commander": { - "version": "7.2.0", - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, - "node_modules/@docusaurus/utils/node_modules/css-tree": { - "version": "1.1.3", - "license": "MIT", - "dependencies": { - "mdn-data": "2.0.14", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/@docusaurus/utils/node_modules/entities": { - "version": "3.0.1", - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/@docusaurus/utils/node_modules/mdn-data": { - "version": "2.0.14", - "license": "CC0-1.0" - }, - "node_modules/@docusaurus/utils/node_modules/svgo": { - "version": "2.8.0", - "license": "MIT", - "dependencies": { - "@trysound/sax": "0.2.0", - "commander": "^7.2.0", - "css-select": "^4.1.3", - "css-tree": "^1.1.3", - "csso": "^4.2.0", - "picocolors": "^1.0.0", - "stable": "^0.1.8" - }, - "bin": { - "svgo": "bin/svgo" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/@emotion/is-prop-valid": { - "version": "0.8.8", - "license": "MIT", - "dependencies": { - "@emotion/memoize": "0.7.4" - } - }, - "node_modules/@emotion/memoize": { - "version": "0.7.4", - "license": "MIT" - }, - "node_modules/@emotion/stylis": { - "version": "0.8.5", - "license": "MIT" - }, - "node_modules/@emotion/unitless": { - "version": "0.7.5", - "license": "MIT" - }, - "node_modules/@endiliey/react-ideal-image": { - "version": "0.0.11", - "license": "MIT", - "engines": { - "node": ">= 8.9.0", - "npm": "> 3" - }, - "peerDependencies": { - "prop-types": ">=15", - "react": ">=0.14.x", - "react-waypoint": ">=9.0.2" + "prop-types": ">=15", + "react": ">=0.14.x", + "react-waypoint": ">=9.0.2" } }, "node_modules/@exodus/schemasafe": { - "version": "1.0.0-rc.6", - "license": "MIT" + "version": "1.0.0-rc.9", + "resolved": "https://registry.npmjs.org/@exodus/schemasafe/-/schemasafe-1.0.0-rc.9.tgz", + "integrity": "sha512-dGGHpb61hLwifAu7sotuHFDBw6GTdpG8aKC0fsK17EuTzMRvUrH7lEAr6LTJ+sx3AZYed9yZ77rltVDHyg2hRg==" }, "node_modules/@hapi/hoek": { "version": "9.3.0", - "license": "BSD-3-Clause" + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz", + "integrity": 
"sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==" }, "node_modules/@hapi/topo": { "version": "5.1.0", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz", + "integrity": "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==", "dependencies": { "@hapi/hoek": "^9.0.0" } }, "node_modules/@istanbuljs/load-nyc-config": { "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", "dev": true, - "license": "ISC", "dependencies": { "camelcase": "^5.3.1", "find-up": "^4.1.0", @@ -3132,24 +2891,27 @@ }, "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "dev": true, - "license": "MIT", "dependencies": { "sprintf-js": "~1.0.2" } }, "node_modules/@istanbuljs/load-nyc-config/node_modules/camelcase": { "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", "dev": true, - "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "dev": true, - "license": "MIT", "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -3160,24 +2922,27 @@ }, "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@jest/console": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-27.5.1.tgz", + "integrity": "sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg==", "dev": true, - "license": "MIT", "dependencies": { "@jest/types": "^27.5.1", "@types/node": "*", @@ -3192,8 +2957,9 @@ }, "node_modules/@jest/console/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -3206,8 +2972,9 @@ }, "node_modules/@jest/console/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": 
"sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -3221,8 +2988,9 @@ }, "node_modules/@jest/console/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -3232,21 +3000,61 @@ }, "node_modules/@jest/console/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/@jest/console/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/@jest/console/node_modules/jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/console/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, "node_modules/@jest/console/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -3256,8 +3064,9 @@ }, "node_modules/@jest/core": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-27.5.1.tgz", + "integrity": "sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ==", "dev": true, - "license": "MIT", "dependencies": { "@jest/console": "^27.5.1", "@jest/reporters": "^27.5.1", @@ -3302,8 +3111,9 @@ }, "node_modules/@jest/core/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, 
@@ -3316,8 +3126,9 @@ }, "node_modules/@jest/core/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -3331,8 +3142,9 @@ }, "node_modules/@jest/core/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -3342,43 +3154,85 @@ }, "node_modules/@jest/core/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/@jest/core/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/@jest/core/node_modules/supports-color": { - "version": "7.2.0", + "node_modules/@jest/core/node_modules/jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", "dev": true, - "license": "MIT", "dependencies": { - "has-flag": "^4.0.0" + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" }, "engines": { - "node": ">=8" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/@jest/create-cache-key-function": { + "node_modules/@jest/core/node_modules/jest-util": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", "dev": true, - "license": "MIT", "dependencies": { - "@jest/types": "^27.5.1" - }, - "engines": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/core/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/create-cache-key-function": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/create-cache-key-function/-/create-cache-key-function-27.5.1.tgz", + "integrity": "sha512-dmH1yW+makpTSURTy8VzdUwFnfQh1G8R+DxO2Ho2FFmBbKFEVm+3jWdvFhE2VqB/LATCTokkP0dotjyQyw5/AQ==", + "dev": 
true, + "dependencies": { + "@jest/types": "^27.5.1" + }, + "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/@jest/environment": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-27.5.1.tgz", + "integrity": "sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA==", "dev": true, - "license": "MIT", "dependencies": { "@jest/fake-timers": "^27.5.1", "@jest/types": "^27.5.1", @@ -3389,10 +3243,23 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, + "node_modules/@jest/expect-utils": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.2.1.tgz", + "integrity": "sha512-yr4aHNg5Z1CjKby5ozm7sKjgBlCOorlAoFcvrOQ/4rbZRfgZQdnmh7cth192PYIgiPZo2bBXvqdOApnAMWFJZg==", + "dev": true, + "dependencies": { + "jest-get-type": "^29.2.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, "node_modules/@jest/fake-timers": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-27.5.1.tgz", + "integrity": "sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ==", "dev": true, - "license": "MIT", "dependencies": { "@jest/types": "^27.5.1", "@sinonjs/fake-timers": "^8.0.1", @@ -3405,10 +3272,118 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, + "node_modules/@jest/fake-timers/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/fake-timers/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@jest/fake-timers/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@jest/fake-timers/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/@jest/fake-timers/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/fake-timers/node_modules/jest-message-util": { + "version": "27.5.1", + "resolved": 
"https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/fake-timers/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/fake-timers/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/@jest/globals": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-27.5.1.tgz", + "integrity": "sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q==", "dev": true, - "license": "MIT", "dependencies": { "@jest/environment": "^27.5.1", "@jest/types": "^27.5.1", @@ -3418,10 +3393,164 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, + "node_modules/@jest/globals/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/globals/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@jest/globals/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@jest/globals/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + 
"node_modules/@jest/globals/node_modules/diff-sequences": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": "sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/globals/node_modules/expect": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/globals/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/globals/node_modules/jest-diff": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/globals/node_modules/jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/globals/node_modules/jest-matcher-utils": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/globals/node_modules/jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/globals/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/@jest/reporters": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-27.5.1.tgz", + "integrity": "sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw==", "dev": true, - "license": "MIT", "dependencies": { "@bcoe/v8-coverage": "^0.2.3", "@jest/console": "^27.5.1", @@ -3463,8 +3592,9 @@ }, "node_modules/@jest/reporters/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -3477,8 +3607,9 @@ }, "node_modules/@jest/reporters/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -3492,8 +3623,9 @@ }, "node_modules/@jest/reporters/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -3503,21 +3635,41 @@ }, "node_modules/@jest/reporters/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/@jest/reporters/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/@jest/reporters/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, "node_modules/@jest/reporters/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -3525,10 +3677,23 @@ "node": ">=8" } }, + "node_modules/@jest/schemas": { + "version": "29.0.0", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.0.0.tgz", + "integrity": 
"sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA==", + "dev": true, + "dependencies": { + "@sinclair/typebox": "^0.24.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, "node_modules/@jest/source-map": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-27.5.1.tgz", + "integrity": "sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg==", "dev": true, - "license": "MIT", "dependencies": { "callsites": "^3.0.0", "graceful-fs": "^4.2.9", @@ -3540,8 +3705,9 @@ }, "node_modules/@jest/test-result": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-27.5.1.tgz", + "integrity": "sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag==", "dev": true, - "license": "MIT", "dependencies": { "@jest/console": "^27.5.1", "@jest/types": "^27.5.1", @@ -3554,8 +3720,9 @@ }, "node_modules/@jest/test-sequencer": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz", + "integrity": "sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ==", "dev": true, - "license": "MIT", "dependencies": { "@jest/test-result": "^27.5.1", "graceful-fs": "^4.2.9", @@ -3568,8 +3735,9 @@ }, "node_modules/@jest/transform": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-27.5.1.tgz", + "integrity": "sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw==", "dev": true, - "license": "MIT", "dependencies": { "@babel/core": "^7.1.0", "@jest/types": "^27.5.1", @@ -3593,8 +3761,9 @@ }, "node_modules/@jest/transform/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -3607,8 +3776,9 @@ }, "node_modules/@jest/transform/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -3622,8 +3792,9 @@ }, "node_modules/@jest/transform/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -3633,21 +3804,41 @@ }, "node_modules/@jest/transform/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/@jest/transform/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", 
"engines": { "node": ">=8" } }, + "node_modules/@jest/transform/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, "node_modules/@jest/transform/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -3657,8 +3848,9 @@ }, "node_modules/@jest/types": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.5.1.tgz", + "integrity": "sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw==", "dev": true, - "license": "MIT", "dependencies": { "@types/istanbul-lib-coverage": "^2.0.0", "@types/istanbul-reports": "^3.0.0", @@ -3672,8 +3864,9 @@ }, "node_modules/@jest/types/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -3686,8 +3879,9 @@ }, "node_modules/@jest/types/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -3701,8 +3895,9 @@ }, "node_modules/@jest/types/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -3712,21 +3907,24 @@ }, "node_modules/@jest/types/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/@jest/types/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@jest/types/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -3735,11 +3933,12 @@ } }, "node_modules/@jimp/bmp": { - "version": "0.16.1", 
- "license": "MIT", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/bmp/-/bmp-0.16.2.tgz", + "integrity": "sha512-4g9vW45QfMoGhLVvaFj26h4e7cC+McHUQwyFQmNTLW4FfC1OonN9oUr2m/FEDGkTYKR7aqdXR5XUqqIkHWLaFw==", "dependencies": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1", + "@jimp/utils": "^0.16.2", "bmp-js": "^0.1.0" }, "peerDependencies": { @@ -3747,11 +3946,12 @@ } }, "node_modules/@jimp/core": { - "version": "0.16.1", - "license": "MIT", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/core/-/core-0.16.2.tgz", + "integrity": "sha512-dp7HcyUMzjXphXYodI6PaXue+I9PXAavbb+AN+1XqFbotN22Z12DosNPEyy+UhLY/hZiQQqUkEaJHkvV31rs+w==", "dependencies": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1", + "@jimp/utils": "^0.16.2", "any-base": "^1.1.0", "buffer": "^5.2.0", "exif-parser": "^0.1.12", @@ -3763,42 +3963,22 @@ "tinycolor2": "^1.4.1" } }, - "node_modules/@jimp/core/node_modules/buffer": { - "version": "5.7.1", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, "node_modules/@jimp/custom": { - "version": "0.16.1", - "license": "MIT", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/custom/-/custom-0.16.2.tgz", + "integrity": "sha512-GtNwOs4hcVS2GIbqRUf42rUuX07oLB92cj7cqxZb0ZGWwcwhnmSW0TFLAkNafXmqn9ug4VTpNvcJSUdiuECVKg==", "dependencies": { "@babel/runtime": "^7.7.2", - "@jimp/core": "^0.16.1" + "@jimp/core": "^0.16.2" } }, "node_modules/@jimp/gif": { - "version": "0.16.1", - "license": "MIT", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/gif/-/gif-0.16.2.tgz", + "integrity": "sha512-TMdyT9Q0paIKNtT7c5KzQD29CNCsI/t8ka28jMrBjEK7j5RRTvBfuoOnHv7pDJRCjCIqeUoaUSJ7QcciKic6CA==", "dependencies": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1", + "@jimp/utils": "^0.16.2", "gifwrap": "^0.9.2", "omggif": "^1.0.9" }, @@ -3807,34 +3987,37 @@ } }, "node_modules/@jimp/jpeg": { - "version": "0.16.1", - "license": "MIT", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/jpeg/-/jpeg-0.16.2.tgz", + "integrity": "sha512-BW5gZydgq6wdIwHd+3iUNgrTklvoQc/FUKSj9meM6A0FU21lUaansRX5BDdJqHkyXJLnnlDGwDt27J+hQuBAVw==", "dependencies": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1", - "jpeg-js": "0.4.2" + "@jimp/utils": "^0.16.2", + "jpeg-js": "^0.4.2" }, "peerDependencies": { "@jimp/custom": ">=0.3.5" } }, "node_modules/@jimp/plugin-resize": { - "version": "0.16.1", - "license": "MIT", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/plugin-resize/-/plugin-resize-0.16.2.tgz", + "integrity": "sha512-gE4N9l6xuwzacFZ2EPCGZCJ/xR+aX2V7GdMndIl/6kYIw5/eib1SFuF9AZLvIPSFuE1FnGo8+vT0pr++SSbhYg==", "dependencies": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1" + "@jimp/utils": "^0.16.2" }, "peerDependencies": { "@jimp/custom": ">=0.3.5" } }, "node_modules/@jimp/png": { - "version": "0.16.1", - "license": "MIT", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/png/-/png-0.16.2.tgz", + "integrity": "sha512-sFOtOSz/tzDwXEChFQ/Nxe+0+vG3Tj0eUxnZVDUG/StXE9dI8Bqmwj3MIa0EgK5s+QG3YlnDOmlPUa4JqmeYeQ==", "dependencies": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1", + "@jimp/utils": "^0.16.2", "pngjs": "^3.3.3" }, "peerDependencies": { @@ -3842,8 +4025,9 @@ } 
}, "node_modules/@jimp/tiff": { - "version": "0.16.1", - "license": "MIT", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/tiff/-/tiff-0.16.2.tgz", + "integrity": "sha512-ADcdqmtZF+U2YoaaHTzFX8D6NFpmN4WZUT0BPMerEuY7Cq8QoLYU22z2h034FrVW+Rbi1b3y04sB9iDiQAlf2w==", "dependencies": { "@babel/runtime": "^7.7.2", "utif": "^2.0.1" @@ -3853,15 +4037,16 @@ } }, "node_modules/@jimp/types": { - "version": "0.16.1", - "license": "MIT", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/types/-/types-0.16.2.tgz", + "integrity": "sha512-0Ue5Sq0XnDF6TirisWv5E+8uOnRcd8vRLuwocJOhF76NIlcQrz+5r2k2XWKcr3d+11n28dHLXW5TKSqrUopxhA==", "dependencies": { "@babel/runtime": "^7.7.2", - "@jimp/bmp": "^0.16.1", - "@jimp/gif": "^0.16.1", - "@jimp/jpeg": "^0.16.1", - "@jimp/png": "^0.16.1", - "@jimp/tiff": "^0.16.1", + "@jimp/bmp": "^0.16.2", + "@jimp/gif": "^0.16.2", + "@jimp/jpeg": "^0.16.2", + "@jimp/png": "^0.16.2", + "@jimp/tiff": "^0.16.2", "timm": "^1.6.1" }, "peerDependencies": { @@ -3869,8 +4054,9 @@ } }, "node_modules/@jimp/utils": { - "version": "0.16.1", - "license": "MIT", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/utils/-/utils-0.16.2.tgz", + "integrity": "sha512-XENrPvmigiXZQ8E2nxJqO6UVvWBLzbNwyYi3Y8Q1IECoYhYI3kgOQ0fmy4G269Vz1V0omh1bNmC42r4OfXg1Jg==", "dependencies": { "@babel/runtime": "^7.7.2", "regenerator-runtime": "^0.13.3" @@ -3878,7 +4064,8 @@ }, "node_modules/@jridgewell/gen-mapping": { "version": "0.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz", + "integrity": "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==", "dependencies": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" @@ -3888,38 +4075,66 @@ } }, "node_modules/@jridgewell/resolve-uri": { - "version": "3.0.6", - "license": "MIT", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/set-array": { - "version": "1.1.0", - "license": "MIT", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", + "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/source-map": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz", + "integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.0", + "@jridgewell/trace-mapping": "^0.3.9" + } + }, + "node_modules/@jridgewell/source-map/node_modules/@jridgewell/gen-mapping": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", + "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.11", - "license": "MIT" + "version": "1.4.14", + "resolved": 
"https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", + "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==" }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.9", - "license": "MIT", + "version": "0.3.17", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz", + "integrity": "sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==", "dependencies": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" + "@jridgewell/resolve-uri": "3.1.0", + "@jridgewell/sourcemap-codec": "1.4.14" } }, "node_modules/@leichtgewicht/ip-codec": { - "version": "2.0.3", - "license": "MIT" + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz", + "integrity": "sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==" }, "node_modules/@mdx-js/mdx": { "version": "1.6.22", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-1.6.22.tgz", + "integrity": "sha512-AMxuLxPz2j5/6TpF/XSdKpQP1NlG0z11dFOlq+2IP/lSgl11GY8ji6S/rgsViN/L0BDvHvUMruRb7ub+24LUYA==", "dependencies": { "@babel/core": "7.12.9", "@babel/plugin-syntax-jsx": "7.12.1", @@ -3948,7 +4163,8 @@ }, "node_modules/@mdx-js/mdx/node_modules/@babel/core": { "version": "7.12.9", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz", + "integrity": "sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==", "dependencies": { "@babel/code-frame": "^7.10.4", "@babel/generator": "^7.12.5", @@ -3977,7 +4193,8 @@ }, "node_modules/@mdx-js/mdx/node_modules/@babel/plugin-syntax-jsx": { "version": "7.12.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz", + "integrity": "sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg==", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, @@ -3987,21 +4204,24 @@ }, "node_modules/@mdx-js/mdx/node_modules/semver": { "version": "5.7.1", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", "bin": { "semver": "bin/semver" } }, "node_modules/@mdx-js/mdx/node_modules/source-map": { "version": "0.5.7", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", "engines": { "node": ">=0.10.0" } }, "node_modules/@mdx-js/react": { "version": "1.6.22", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz", + "integrity": "sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -4012,7 +4232,8 @@ }, "node_modules/@mdx-js/util": { "version": "1.6.22", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@mdx-js/util/-/util-1.6.22.tgz", + "integrity": "sha512-H1rQc1ZOHANWBvPcW+JpGwr+juXSxM8Q8YCkm3GhZd8REu1fHR3z99CErO1p9pkcfcxZnMdIZdIsXkOHY0NilA==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -4020,7 
+4241,8 @@ }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" @@ -4031,14 +4253,16 @@ }, "node_modules/@nodelib/fs.stat": { "version": "2.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", "engines": { "node": ">= 8" } }, "node_modules/@nodelib/fs.walk": { "version": "1.2.8", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" @@ -4049,11 +4273,13 @@ }, "node_modules/@polka/url": { "version": "1.0.0-next.21", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz", + "integrity": "sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==" }, "node_modules/@redocly/ajv": { - "version": "8.6.4", - "license": "MIT", + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/@redocly/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-9GWx27t7xWhDIR02PA18nzBdLcKQRgc46xNQvjFkrYk4UOmvKhJ/dawwiX0cCOeetN5LcaaiqQbVOWYK62SGHw==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -4066,16 +4292,17 @@ } }, "node_modules/@redocly/openapi-core": { - "version": "1.0.0-beta.94", - "license": "MIT", + "version": "1.0.0-beta.111", + "resolved": "https://registry.npmjs.org/@redocly/openapi-core/-/openapi-core-1.0.0-beta.111.tgz", + "integrity": "sha512-t3dwM+IpQWisFyVvcgd7x9LvIv3B2uQOrfyHoQcEIERW3wsouvX98Vh/qhCh+xAXXXpH8Sh+k4Cz2Qn4ei9VAw==", "dependencies": { - "@redocly/ajv": "^8.6.4", + "@redocly/ajv": "^8.11.0", "@types/node": "^14.11.8", "colorette": "^1.2.0", "js-levenshtein": "^1.1.6", "js-yaml": "^4.1.0", "lodash.isequal": "^4.5.0", - "minimatch": "^3.0.4", + "minimatch": "^5.0.1", "node-fetch": "^2.6.1", "pluralize": "^8.0.0", "yaml-ast-parser": "0.0.43" @@ -4085,257 +4312,230 @@ } }, "node_modules/@redocly/openapi-core/node_modules/@types/node": { - "version": "14.18.16", - "license": "MIT" + "version": "14.18.32", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.32.tgz", + "integrity": "sha512-Y6S38pFr04yb13qqHf8uk1nHE3lXgQ30WZbv1mLliV9pt0NjvqdWttLcrOYLnXbOafknVYRHZGoMSpR9UwfYow==" }, - "node_modules/@redocly/react-dropdown-aria": { - "version": "2.0.12", - "license": "MIT", - "peerDependencies": { - "react": "^16.8.4 || ^17.0.0", - "react-dom": "^16.8.4 || ^17.0.0", - "styled-components": "^5.1.1" + "node_modules/@redocly/openapi-core/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@redocly/openapi-core/node_modules/colorette": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.4.0.tgz", + "integrity": 
"sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g==" + }, + "node_modules/@redocly/openapi-core/node_modules/minimatch": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.0.tgz", + "integrity": "sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" } }, "node_modules/@sideway/address": { "version": "4.1.4", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz", + "integrity": "sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw==", "dependencies": { "@hapi/hoek": "^9.0.0" } }, "node_modules/@sideway/formula": { "version": "3.0.0", - "license": "BSD-3-Clause" + "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.0.tgz", + "integrity": "sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg==" }, "node_modules/@sideway/pinpoint": { "version": "2.0.0", - "license": "BSD-3-Clause" + "resolved": "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz", + "integrity": "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==" + }, + "node_modules/@sinclair/typebox": { + "version": "0.24.47", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.24.47.tgz", + "integrity": "sha512-J4Xw0xYK4h7eC34MNOPQi6IkNxGRck6n4VJpWDzXIFVTW8I/D43Gf+NfWz/v/7NHlzWOPd3+T4PJ4OqklQ2u7A==", + "dev": true }, "node_modules/@sindresorhus/is": { "version": "0.14.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", + "integrity": "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==", "engines": { "node": ">=6" } }, "node_modules/@sinonjs/commons": { "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", + "integrity": "sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { "type-detect": "4.0.8" } }, "node_modules/@sinonjs/fake-timers": { "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz", + "integrity": "sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { "@sinonjs/commons": "^1.7.0" } }, "node_modules/@slorber/static-site-generator-webpack-plugin": { - "version": "4.0.4", - "license": "MIT", + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz", + "integrity": "sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA==", "dependencies": { - "bluebird": "^3.7.1", - "cheerio": "^0.22.0", "eval": "^0.1.8", - "webpack-sources": "^1.4.3" - } - }, - "node_modules/@slorber/static-site-generator-webpack-plugin/node_modules/cheerio": { - "version": "0.22.0", - "license": "MIT", - "dependencies": { - "css-select": "~1.2.0", - "dom-serializer": "~0.1.0", - "entities": "~1.1.1", - "htmlparser2": "^3.9.1", - "lodash.assignin": "^4.0.9", - "lodash.bind": "^4.1.4", - "lodash.defaults": "^4.0.1", - "lodash.filter": "^4.4.0", - "lodash.flatten": "^4.2.0", - 
"lodash.foreach": "^4.3.0", - "lodash.map": "^4.4.0", - "lodash.merge": "^4.4.0", - "lodash.pick": "^4.2.1", - "lodash.reduce": "^4.4.0", - "lodash.reject": "^4.4.0", - "lodash.some": "^4.4.0" + "p-map": "^4.0.0", + "webpack-sources": "^3.2.2" }, "engines": { - "node": ">= 0.6" - } - }, - "node_modules/@slorber/static-site-generator-webpack-plugin/node_modules/css-select": { - "version": "1.2.0", - "license": "BSD-like", - "dependencies": { - "boolbase": "~1.0.0", - "css-what": "2.1", - "domutils": "1.5.1", - "nth-check": "~1.0.1" - } - }, - "node_modules/@slorber/static-site-generator-webpack-plugin/node_modules/css-what": { - "version": "2.1.3", - "license": "BSD-2-Clause", - "engines": { - "node": "*" - } - }, - "node_modules/@slorber/static-site-generator-webpack-plugin/node_modules/dom-serializer": { - "version": "0.1.1", - "license": "MIT", - "dependencies": { - "domelementtype": "^1.3.0", - "entities": "^1.1.1" - } - }, - "node_modules/@slorber/static-site-generator-webpack-plugin/node_modules/domelementtype": { - "version": "1.3.1", - "license": "BSD-2-Clause" - }, - "node_modules/@slorber/static-site-generator-webpack-plugin/node_modules/domhandler": { - "version": "2.4.2", - "license": "BSD-2-Clause", - "dependencies": { - "domelementtype": "1" - } - }, - "node_modules/@slorber/static-site-generator-webpack-plugin/node_modules/domutils": { - "version": "1.5.1", - "dependencies": { - "dom-serializer": "0", - "domelementtype": "1" - } - }, - "node_modules/@slorber/static-site-generator-webpack-plugin/node_modules/entities": { - "version": "1.1.2", - "license": "BSD-2-Clause" - }, - "node_modules/@slorber/static-site-generator-webpack-plugin/node_modules/htmlparser2": { - "version": "3.10.1", - "license": "MIT", - "dependencies": { - "domelementtype": "^1.3.1", - "domhandler": "^2.3.0", - "domutils": "^1.5.1", - "entities": "^1.1.1", - "inherits": "^2.0.1", - "readable-stream": "^3.1.1" - } - }, - "node_modules/@slorber/static-site-generator-webpack-plugin/node_modules/nth-check": { - "version": "1.0.2", - "license": "BSD-2-Clause", - "dependencies": { - "boolbase": "~1.0.0" + "node": ">=14" } }, "node_modules/@svgr/babel-plugin-add-jsx-attribute": { - "version": "5.4.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.5.0.tgz", + "integrity": "sha512-Cp1JR1IPrQNvPRbkfcPmax52iunBC+eQDyBce8feOIIbVH6ZpVhErYoJtPWRBj2rKi4Wi9HvCm1+L1UD6QlBmg==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, "node_modules/@svgr/babel-plugin-remove-jsx-attribute": { - "version": "5.4.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-6.5.0.tgz", + "integrity": "sha512-8zYdkym7qNyfXpWvu4yq46k41pyNM9SOstoWhKlm+IfdCE1DdnRKeMUPsWIEO/DEkaWxJ8T9esNdG3QwQ93jBA==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, "node_modules/@svgr/babel-plugin-remove-jsx-empty-expression": { - "version": "5.0.1", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-6.5.0.tgz", + "integrity": 
"sha512-NFdxMq3xA42Kb1UbzCVxplUc0iqSyM9X8kopImvFnB+uSDdzIHOdbs1op8ofAvVRtbg4oZiyRl3fTYeKcOe9Iw==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, "node_modules/@svgr/babel-plugin-replace-jsx-attribute-value": { - "version": "5.0.1", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.5.0.tgz", + "integrity": "sha512-XWm64/rSPUCQ+MFyA9lhMO+w8bOZvkTvovRIU1lpIy63ysPaVAFtxjQiZj+S7QaLaLGUXkSkf8WZsaN+QPo/gA==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, "node_modules/@svgr/babel-plugin-svg-dynamic-title": { - "version": "5.4.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.5.0.tgz", + "integrity": "sha512-JIF2D2ltiWFGlTw2fJ9jJg1fNT9rWjOD2Cf0/xzeW6Z2LIRQTHcRHxpZq359+SRWtEPsCXEWV2Xmd+DMBj6dBw==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, "node_modules/@svgr/babel-plugin-svg-em-dimensions": { - "version": "5.4.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.5.0.tgz", + "integrity": "sha512-uuo0FfLP4Nu2zncOcoUFDzZdXWma2bxkTGk0etRThs4/PghvPIGaW8cPhCg6yJ8zpaauWcKV0wZtzKlJRCtVzg==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, "node_modules/@svgr/babel-plugin-transform-react-native-svg": { - "version": "5.4.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.5.0.tgz", + "integrity": "sha512-VMRWyOmrV+DaEFPgP3hZMsFgs2g87ojs3txw0Rx8iz6Nf/E3UoHUwTqpkSCWd3Hsnc9gMOY9+wl6+/Ycleh1sw==", "engines": { "node": ">=10" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, "node_modules/@svgr/babel-plugin-transform-svg-component": { - "version": "5.5.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.5.0.tgz", + "integrity": "sha512-b67Ul3SelaqvGEEG/1B3VJ03KUtGFgRQjRLCCjdttMQLcYa9l/izQFEclNFx53pNqhijUMNKHPhGMY/CWGVKig==", "engines": { - "node": ">=10" + "node": ">=12" }, "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, "node_modules/@svgr/babel-preset": { - "version": "5.5.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-6.5.0.tgz", + "integrity": "sha512-UWM98PKVuMqw2UZo8YO3erI6nF1n7/XBYTXBqR0QhZP7HTjYK6QxFNvPfIshddy1hBdzhVpkf148Vg8xiVOtyg==", "dependencies": { - "@svgr/babel-plugin-add-jsx-attribute": "^5.4.0", - "@svgr/babel-plugin-remove-jsx-attribute": "^5.4.0", - "@svgr/babel-plugin-remove-jsx-empty-expression": "^5.0.1", - "@svgr/babel-plugin-replace-jsx-attribute-value": 
"^5.0.1", - "@svgr/babel-plugin-svg-dynamic-title": "^5.4.0", - "@svgr/babel-plugin-svg-em-dimensions": "^5.4.0", - "@svgr/babel-plugin-transform-react-native-svg": "^5.4.0", - "@svgr/babel-plugin-transform-svg-component": "^5.5.0" + "@svgr/babel-plugin-add-jsx-attribute": "^6.5.0", + "@svgr/babel-plugin-remove-jsx-attribute": "^6.5.0", + "@svgr/babel-plugin-remove-jsx-empty-expression": "^6.5.0", + "@svgr/babel-plugin-replace-jsx-attribute-value": "^6.5.0", + "@svgr/babel-plugin-svg-dynamic-title": "^6.5.0", + "@svgr/babel-plugin-svg-em-dimensions": "^6.5.0", + "@svgr/babel-plugin-transform-react-native-svg": "^6.5.0", + "@svgr/babel-plugin-transform-svg-component": "^6.5.0" }, "engines": { "node": ">=10" @@ -4343,15 +4543,21 @@ "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, "node_modules/@svgr/core": { - "version": "5.5.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/core/-/core-6.5.0.tgz", + "integrity": "sha512-jIbu36GMjfK8HCCQitkfVVeQ2vSXGfq0ef0GO9HUxZGjal6Kvpkk4PwpkFP+OyCzF+skQFT9aWrUqekT3pKF8w==", "dependencies": { - "@svgr/plugin-jsx": "^5.5.0", + "@babel/core": "^7.18.5", + "@svgr/babel-preset": "^6.5.0", + "@svgr/plugin-jsx": "^6.5.0", "camelcase": "^6.2.0", - "cosmiconfig": "^7.0.0" + "cosmiconfig": "^7.0.1" }, "engines": { "node": ">=10" @@ -4362,10 +4568,12 @@ } }, "node_modules/@svgr/hast-util-to-babel-ast": { - "version": "5.5.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.5.0.tgz", + "integrity": "sha512-PPy94U/EiPQ2dY0b4jEqj4QOdDRq6DG7aTHjpGaL8HlKSHkpU1DpjfywCXTJqtOdCo2FywjWvg0U2FhqMeUJaA==", "dependencies": { - "@babel/types": "^7.12.6" + "@babel/types": "^7.18.4", + "entities": "^4.3.0" }, "engines": { "node": ">=10" @@ -4376,13 +4584,14 @@ } }, "node_modules/@svgr/plugin-jsx": { - "version": "5.5.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-6.5.0.tgz", + "integrity": "sha512-1CHMqOBKoNk/ZPU+iGXKcQPC6q9zaD7UOI99J+BaGY5bdCztcf5bZyi0QZSDRJtCQpdofeVv7XfBYov2mtl0Pw==", "dependencies": { - "@babel/core": "^7.12.3", - "@svgr/babel-preset": "^5.5.0", - "@svgr/hast-util-to-babel-ast": "^5.5.0", - "svg-parser": "^2.0.2" + "@babel/core": "^7.18.5", + "@svgr/babel-preset": "^6.5.0", + "@svgr/hast-util-to-babel-ast": "^6.5.0", + "svg-parser": "^2.0.4" }, "engines": { "node": ">=10" @@ -4390,15 +4599,19 @@ "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@svgr/core": "^6.0.0" } }, "node_modules/@svgr/plugin-svgo": { - "version": "5.5.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-6.5.0.tgz", + "integrity": "sha512-8Zv1Yyv6I7HlIqrqGFM0sDKQrhjbfNZJawR8UjIaVWSb0tKZP1Ra6ymhqIFu6FT6kDRD0Ct5NlQZ10VUujSspw==", "dependencies": { - "cosmiconfig": "^7.0.0", + "cosmiconfig": "^7.0.1", "deepmerge": "^4.2.2", - "svgo": "^1.2.2" + "svgo": "^2.8.0" }, "engines": { "node": ">=10" @@ -4406,20 +4619,24 @@ "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@svgr/core": "^6.0.0" } }, "node_modules/@svgr/webpack": { - "version": "5.5.0", - "license": "MIT", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/webpack/-/webpack-6.5.0.tgz", + "integrity": 
"sha512-rM/Z4pwMhqvAXEHoHIlE4SeTb0ToQNmJuBdiHwhP2ZtywyX6XqrgCv2WX7K/UCgNYJgYbekuylgyjnuLUHTcZQ==", "dependencies": { - "@babel/core": "^7.12.3", - "@babel/plugin-transform-react-constant-elements": "^7.12.1", - "@babel/preset-env": "^7.12.1", - "@babel/preset-react": "^7.12.5", - "@svgr/core": "^5.5.0", - "@svgr/plugin-jsx": "^5.5.0", - "@svgr/plugin-svgo": "^5.5.0", - "loader-utils": "^2.0.0" + "@babel/core": "^7.18.5", + "@babel/plugin-transform-react-constant-elements": "^7.17.12", + "@babel/preset-env": "^7.18.2", + "@babel/preset-react": "^7.17.12", + "@babel/preset-typescript": "^7.17.12", + "@svgr/core": "^6.5.0", + "@svgr/plugin-jsx": "^6.5.0", + "@svgr/plugin-svgo": "^6.5.0" }, "engines": { "node": ">=10" @@ -4430,9 +4647,11 @@ } }, "node_modules/@swc/core": { - "version": "1.2.174", + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.9.tgz", + "integrity": "sha512-PCRCO9vIoEX3FyS3z/FkWVYJzuspUq0LLaWdK3L30+KQDtH29K+LQdRc2Dzin2MU5MpY4bSHydAwl9M6cmZ9OA==", "dev": true, - "license": "Apache-2.0", + "hasInstallScript": true, "bin": { "swcx": "run_swcx.js" }, @@ -4444,28 +4663,74 @@ "url": "https://opencollective.com/swc" }, "optionalDependencies": { - "@swc/core-android-arm-eabi": "1.2.174", - "@swc/core-android-arm64": "1.2.174", - "@swc/core-darwin-arm64": "1.2.174", - "@swc/core-darwin-x64": "1.2.174", - "@swc/core-freebsd-x64": "1.2.174", - "@swc/core-linux-arm-gnueabihf": "1.2.174", - "@swc/core-linux-arm64-gnu": "1.2.174", - "@swc/core-linux-arm64-musl": "1.2.174", - "@swc/core-linux-x64-gnu": "1.2.174", - "@swc/core-linux-x64-musl": "1.2.174", - "@swc/core-win32-arm64-msvc": "1.2.174", - "@swc/core-win32-ia32-msvc": "1.2.174", - "@swc/core-win32-x64-msvc": "1.2.174" + "@swc/core-android-arm-eabi": "1.3.9", + "@swc/core-android-arm64": "1.3.9", + "@swc/core-darwin-arm64": "1.3.9", + "@swc/core-darwin-x64": "1.3.9", + "@swc/core-freebsd-x64": "1.3.9", + "@swc/core-linux-arm-gnueabihf": "1.3.9", + "@swc/core-linux-arm64-gnu": "1.3.9", + "@swc/core-linux-arm64-musl": "1.3.9", + "@swc/core-linux-x64-gnu": "1.3.9", + "@swc/core-linux-x64-musl": "1.3.9", + "@swc/core-win32-arm64-msvc": "1.3.9", + "@swc/core-win32-ia32-msvc": "1.3.9", + "@swc/core-win32-x64-msvc": "1.3.9" + } + }, + "node_modules/@swc/core-android-arm-eabi": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-android-arm-eabi/-/core-android-arm-eabi-1.3.9.tgz", + "integrity": "sha512-+F+sU2l49Po4tJoNtIpFwt0k1sspymvPMM+DCpnkHF1idzRiOU5NGgVzmLDjoO9AnxHa7EBJ3itN+PP2Dd06+A==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "dependencies": { + "@swc/wasm": "1.2.122" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-android-arm64": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-android-arm64/-/core-android-arm64-1.3.9.tgz", + "integrity": "sha512-HSWdex3yd4CRefkM2WVz0nTKjpirNZnwSlghqe4ct9QAYGMiiPesYgWPAnq/PpnYfmjQse4yvEclamGiek6zDA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "dependencies": { + "@swc/wasm": "1.2.130" + }, + "engines": { + "node": ">=10" } }, + "node_modules/@swc/core-android-arm64/node_modules/@swc/wasm": { + "version": "1.2.130", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.130.tgz", + "integrity": "sha512-rNcJsBxS70+pv8YUWwf5fRlWX6JoY/HJc25HD/F8m6Kv7XhJdqPPMhyX6TKkUBPAG7TWlZYoxa+rHAjPy4Cj3Q==", + "dev": true, + "optional": true + }, "node_modules/@swc/core-darwin-arm64": { - "version": 
"1.2.174", + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.9.tgz", + "integrity": "sha512-E7WJY1LsMJtOtUYc/JXl8qlt6USnzodWmdO1eAAOSAODEdX9AjgG3fRT94o3UcmvMrto7sxBXVExj8wG7Cxeng==", "cpu": [ "arm64" ], "dev": true, - "license": "Apache-2.0 AND MIT", "optional": true, "os": [ "darwin" @@ -4474,36 +4739,247 @@ "node": ">=10" } }, - "node_modules/@swc/jest": { - "version": "0.2.20", + "node_modules/@swc/core-darwin-x64": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.9.tgz", + "integrity": "sha512-0+dFCAcLEBxwIO+0Nt+OT8mjPpvBMBWIuFWB1DNiUu2K73+OB0i+llzsCJFoasISHR+YJD0bGyv+8AtVuUdFAw==", + "cpu": [ + "x64" + ], "dev": true, - "license": "MIT", - "dependencies": { - "@jest/create-cache-key-function": "^27.4.2" - }, + "optional": true, + "os": [ + "darwin" + ], "engines": { - "npm": ">= 7.0.0" - }, - "peerDependencies": { - "@swc/core": "*" + "node": ">=10" } }, - "node_modules/@szmarczak/http-timer": { - "version": "1.1.2", - "license": "MIT", + "node_modules/@swc/core-freebsd-x64": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-freebsd-x64/-/core-freebsd-x64-1.3.9.tgz", + "integrity": "sha512-JbHIeklQPRBEZUfKAKt/IB/ayi7dJZ9tEGu/fDxNfk8Znu1Md+YOKRyN5FPMXfYrL5yFUXnlFOb2LX6wjNhhjQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], "dependencies": { - "defer-to-connect": "^1.0.1" + "@swc/wasm": "1.2.130" }, "engines": { - "node": ">=6" + "node": ">=10" } }, - "node_modules/@testing-library/dom": { - "version": "8.13.0", + "node_modules/@swc/core-freebsd-x64/node_modules/@swc/wasm": { + "version": "1.2.130", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.130.tgz", + "integrity": "sha512-rNcJsBxS70+pv8YUWwf5fRlWX6JoY/HJc25HD/F8m6Kv7XhJdqPPMhyX6TKkUBPAG7TWlZYoxa+rHAjPy4Cj3Q==", + "dev": true, + "optional": true + }, + "node_modules/@swc/core-linux-arm-gnueabihf": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.9.tgz", + "integrity": "sha512-Yc1G8FGXmq6yGKtu5wYCcvVWBtqU0/3FUk6zJM+7pFiivKsVHJcgWrkgLO1u6h7bgEdQIYwfM3/BbRNE5CtdnA==", + "cpu": [ + "arm" + ], "dev": true, - "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "dependencies": { - "@babel/code-frame": "^7.10.4", + "@swc/wasm": "1.2.130" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm-gnueabihf/node_modules/@swc/wasm": { + "version": "1.2.130", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.130.tgz", + "integrity": "sha512-rNcJsBxS70+pv8YUWwf5fRlWX6JoY/HJc25HD/F8m6Kv7XhJdqPPMhyX6TKkUBPAG7TWlZYoxa+rHAjPy4Cj3Q==", + "dev": true, + "optional": true + }, + "node_modules/@swc/core-linux-arm64-gnu": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.9.tgz", + "integrity": "sha512-PrBjmPIMhoQLCpfaZl2b1cCXnaNPddQB/ssMVqQ6eXChBJfcv14M5BjxtI2ORi4HoEDlsbX+k50sL666M3lnBw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-musl": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.9.tgz", + "integrity": "sha512-jJT56vt81o2N3O2nXp+MZGM6mbgkNx6lvvRT6yISW29fLM6NHBXmkGcjaWOD9VFJDRmu/MtFxbElPxr6ikrFYQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + 
"os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-gnu": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.9.tgz", + "integrity": "sha512-60ZreTvrJk3N7xvPzQeQJDePsXUmSUZkKD6lc0xzug4bv53NyUIQ8gH8nzVsV++D9NZeVxXp6WqqFLcgt7yEDQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-musl": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.9.tgz", + "integrity": "sha512-UBApPfUSP+w6ye6V1oT4EGh3LFCFrZaQsC1CkTuiYXXSmQMzkYE0Jzegn3R7MHWCJSneRwXRTKrkdhrNBUqWKA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-arm64-msvc": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.9.tgz", + "integrity": "sha512-4FQSalXbbnqTLVGRljRnw/bJ99Jwj1WnXz/aJM/SVL8S9Zbc82+3v+wXL/9NGwaAndu2QUkb2KPYNAHvB7PCdw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "dependencies": { + "@swc/wasm": "1.2.130" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-arm64-msvc/node_modules/@swc/wasm": { + "version": "1.2.130", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.130.tgz", + "integrity": "sha512-rNcJsBxS70+pv8YUWwf5fRlWX6JoY/HJc25HD/F8m6Kv7XhJdqPPMhyX6TKkUBPAG7TWlZYoxa+rHAjPy4Cj3Q==", + "dev": true, + "optional": true + }, + "node_modules/@swc/core-win32-ia32-msvc": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.9.tgz", + "integrity": "sha512-ZkTw1Cm+b2QBf/NjkJJbocvgT0NWdfPQL0OyMkuTAinRzfrMmq/lmshjnqj3ysFVeI4uuJTNemiT6mivpLmuBw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "dependencies": { + "@swc/wasm": "1.2.130" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-ia32-msvc/node_modules/@swc/wasm": { + "version": "1.2.130", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.130.tgz", + "integrity": "sha512-rNcJsBxS70+pv8YUWwf5fRlWX6JoY/HJc25HD/F8m6Kv7XhJdqPPMhyX6TKkUBPAG7TWlZYoxa+rHAjPy4Cj3Q==", + "dev": true, + "optional": true + }, + "node_modules/@swc/core-win32-x64-msvc": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.9.tgz", + "integrity": "sha512-moKi2prCKzYnXXlrLf5nwAN4uGSm4YpsW2xzYiZWJJDRqu74VoUWoDkG25jalHTfN/PSBQg4dkFWhhUe89JJVw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/jest": { + "version": "0.2.23", + "resolved": "https://registry.npmjs.org/@swc/jest/-/jest-0.2.23.tgz", + "integrity": "sha512-ZLj17XjHbPtNsgqjm83qizENw05emLkKGu3WuPUttcy9hkngl0/kcc7fDbcSBpADS0GUtsO+iKPjZFWVAtJSlA==", + "dev": true, + "dependencies": { + "@jest/create-cache-key-function": "^27.4.2", + "jsonc-parser": "^3.2.0" + }, + "engines": { + "npm": ">= 7.0.0" + }, + "peerDependencies": { + "@swc/core": "*" + } + }, + "node_modules/@swc/wasm": { + "version": "1.2.122", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.122.tgz", + "integrity": "sha512-sM1VCWQxmNhFtdxME+8UXNyPNhxNu7zdb6ikWpz0YKAQQFRGT5ThZgJrubEpah335SUToNg8pkdDF7ibVCjxbQ==", + "dev": true, + 
"optional": true + }, + "node_modules/@szmarczak/http-timer": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz", + "integrity": "sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==", + "dependencies": { + "defer-to-connect": "^1.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@testing-library/dom": { + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.19.0.tgz", + "integrity": "sha512-6YWYPPpxG3e/xOo6HIWwB/58HukkwIVTOaZ0VwdMVjhRUX/01E4FtQbck9GazOOj7MXHc5RBzMrU86iBJHbI+A==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.10.4", "@babel/runtime": "^7.12.5", "@types/aria-query": "^4.2.0", "aria-query": "^5.0.0", @@ -4518,8 +4994,9 @@ }, "node_modules/@testing-library/dom/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -4532,8 +5009,9 @@ }, "node_modules/@testing-library/dom/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -4547,8 +5025,9 @@ }, "node_modules/@testing-library/dom/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -4558,21 +5037,24 @@ }, "node_modules/@testing-library/dom/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/@testing-library/dom/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@testing-library/dom/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -4581,15 +5063,16 @@ } }, "node_modules/@testing-library/jest-dom": { - "version": "5.16.4", + "version": "5.16.5", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-5.16.5.tgz", + "integrity": "sha512-N5ixQ2qKpi5OLYfwQmUb/5mSV9LneAcaUfp32pn4yCnpb8r/Yz0pXFPck21dIicKmi+ta5WRAknkZCfA8refMA==", "dev": true, - "license": "MIT", "dependencies": { + "@adobe/css-tools": "^4.0.1", "@babel/runtime": "^7.9.2", "@types/testing-library__jest-dom": "^5.9.1", "aria-query": "^5.0.0", "chalk": "^3.0.0", - "css": "^3.0.0", 
"css.escape": "^1.5.1", "dom-accessibility-api": "^0.5.6", "lodash": "^4.17.15", @@ -4603,8 +5086,9 @@ }, "node_modules/@testing-library/jest-dom/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -4617,8 +5101,9 @@ }, "node_modules/@testing-library/jest-dom/node_modules/chalk": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -4629,8 +5114,9 @@ }, "node_modules/@testing-library/jest-dom/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -4640,21 +5126,24 @@ }, "node_modules/@testing-library/jest-dom/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/@testing-library/jest-dom/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@testing-library/jest-dom/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -4664,8 +5153,9 @@ }, "node_modules/@testing-library/react": { "version": "12.1.5", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-12.1.5.tgz", + "integrity": "sha512-OfTXCJUFgjd/digLUuPxa0+/3ZxsQmE7ub9kcbW/wi96Bh3o/p5vrETcBGfP17NWPGqeYYl5LTRpwyGoMC4ysg==", "dev": true, - "license": "MIT", "dependencies": { "@babel/runtime": "^7.12.5", "@testing-library/dom": "^8.0.0", @@ -4681,8 +5171,9 @@ }, "node_modules/@testing-library/user-event": { "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-13.5.0.tgz", + "integrity": "sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg==", "dev": true, - "license": "MIT", "dependencies": { "@babel/runtime": "^7.12.5" }, @@ -4696,28 +5187,32 @@ }, "node_modules/@tootallnate/once": { "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", "dev": true, - "license": "MIT", "engines": { "node": ">= 6" } }, "node_modules/@trysound/sax": { "version": "0.2.0", - "license": "ISC", + "resolved": 
"https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", + "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==", "engines": { "node": ">=10.13.0" } }, "node_modules/@types/aria-query": { "version": "4.2.2", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-4.2.2.tgz", + "integrity": "sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig==", + "dev": true }, "node_modules/@types/babel__core": { "version": "7.1.19", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.19.tgz", + "integrity": "sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw==", "dev": true, - "license": "MIT", "dependencies": { "@babel/parser": "^7.1.0", "@babel/types": "^7.0.0", @@ -4728,32 +5223,36 @@ }, "node_modules/@types/babel__generator": { "version": "7.6.4", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.4.tgz", + "integrity": "sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==", "dev": true, - "license": "MIT", "dependencies": { "@babel/types": "^7.0.0" } }, "node_modules/@types/babel__template": { "version": "7.4.1", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.1.tgz", + "integrity": "sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==", "dev": true, - "license": "MIT", "dependencies": { "@babel/parser": "^7.1.0", "@babel/types": "^7.0.0" } }, "node_modules/@types/babel__traverse": { - "version": "7.17.1", + "version": "7.18.2", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.18.2.tgz", + "integrity": "sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg==", "dev": true, - "license": "MIT", "dependencies": { "@babel/types": "^7.3.0" } }, "node_modules/@types/body-parser": { "version": "1.19.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", + "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", "dependencies": { "@types/connect": "*", "@types/node": "*" @@ -4761,37 +5260,42 @@ }, "node_modules/@types/bonjour": { "version": "3.5.10", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/bonjour/-/bonjour-3.5.10.tgz", + "integrity": "sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/connect": { "version": "3.4.35", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", + "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/connect-history-api-fallback": { "version": "1.3.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz", + "integrity": "sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw==", "dependencies": { "@types/express-serve-static-core": "*", "@types/node": "*" } }, "node_modules/@types/eslint": { - "version": "8.4.1", - "license": "MIT", + "version": "8.4.6", + "resolved": 
"https://registry.npmjs.org/@types/eslint/-/eslint-8.4.6.tgz", + "integrity": "sha512-/fqTbjxyFUaYNO7VcW5g+4npmqVACz1bB7RTHYuLj+PRjw9hrCwrUXVQFpChUS0JsyEFvMZ7U/PfmvWgxJhI9g==", "dependencies": { "@types/estree": "*", "@types/json-schema": "*" } }, "node_modules/@types/eslint-scope": { - "version": "3.7.3", - "license": "MIT", + "version": "3.7.4", + "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.4.tgz", + "integrity": "sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA==", "dependencies": { "@types/eslint": "*", "@types/estree": "*" @@ -4799,11 +5303,13 @@ }, "node_modules/@types/estree": { "version": "0.0.51", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz", + "integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==" }, "node_modules/@types/express": { - "version": "4.17.13", - "license": "MIT", + "version": "4.17.14", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.14.tgz", + "integrity": "sha512-TEbt+vaPFQ+xpxFLFssxUDXj5cWCxZJjIcB7Yg0k0GMHGtgtQgpvx/MUQUeAkNbA9AAGrwkAsoeItdTgS7FMyg==", "dependencies": { "@types/body-parser": "*", "@types/express-serve-static-core": "^4.17.18", @@ -4812,8 +5318,9 @@ } }, "node_modules/@types/express-serve-static-core": { - "version": "4.17.28", - "license": "MIT", + "version": "4.17.31", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.31.tgz", + "integrity": "sha512-DxMhY+NAsTwMMFHBTtJFNp5qiHKJ7TeqOo23zVEM9alT1Ml27Q3xcTH0xwxn7Q0BbMcVEJOs/7aQtUWupUQN3Q==", "dependencies": { "@types/node": "*", "@types/qs": "*", @@ -4822,119 +5329,167 @@ }, "node_modules/@types/graceful-fs": { "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.5.tgz", + "integrity": "sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==", "dev": true, - "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/@types/hast": { "version": "2.3.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.4.tgz", + "integrity": "sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==", "dependencies": { "@types/unist": "*" } }, "node_modules/@types/history": { "version": "4.7.11", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.11.tgz", + "integrity": "sha512-qjDJRrmvBMiTx+jyLxvLfJU7UznFuokDv4f3WRuriHKERccVpFU+8XMQUAbDzoiJCsmexxRExQeMwwCdamSKDA==" }, "node_modules/@types/html-minifier-terser": { "version": "6.1.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", + "integrity": "sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==" }, "node_modules/@types/http-proxy": { - "version": "1.17.8", - "license": "MIT", + "version": "1.17.9", + "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.9.tgz", + "integrity": "sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": 
"sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", + "dev": true }, "node_modules/@types/istanbul-lib-report": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", "dev": true, - "license": "MIT", "dependencies": { "@types/istanbul-lib-coverage": "*" } }, "node_modules/@types/istanbul-reports": { "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", "dev": true, - "license": "MIT", "dependencies": { "@types/istanbul-lib-report": "*" } }, "node_modules/@types/jest": { - "version": "27.4.1", + "version": "29.2.0", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.2.0.tgz", + "integrity": "sha512-KO7bPV21d65PKwv3LLsD8Jn3E05pjNjRZvkm+YTacWhVmykAb07wW6IkZUmQAltwQafNcDUEUrMO2h3jeBSisg==", + "dev": true, + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/jest/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@types/jest/node_modules/pretty-format": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.2.1.tgz", + "integrity": "sha512-Y41Sa4aLCtKAXvwuIpTvcFBkyeYp2gdFWzXGA+ZNES3VwURIB165XO/z7CjETwzCCS53MjW/rLMyyqEnTtaOfA==", "dev": true, - "license": "MIT", "dependencies": { - "jest-matcher-utils": "^27.0.0", - "pretty-format": "^27.0.0" + "@jest/schemas": "^29.0.0", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/@types/json-schema": { "version": "7.0.11", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==" + }, + "node_modules/@types/katex": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.11.1.tgz", + "integrity": "sha512-DUlIj2nk0YnJdlWgsFuVKcX27MLW0KbKmGVoUHmFr+74FYYNUDAaj9ZqTADvsbE8rfxuVmSFc7KczYn5Y09ozg==" }, "node_modules/@types/lodash": { - "version": "4.14.182", - "license": "MIT" + "version": "4.14.186", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.186.tgz", + "integrity": "sha512-eHcVlLXP0c2FlMPm56ITode2AgLMSa6aJ05JTTbYbI+7EMkCEE5qk2E41d5g2lCVTqRe0GnnRFurmlCsDODrPw==" }, "node_modules/@types/mdast": { "version": "3.0.10", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz", + "integrity": "sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==", "dependencies": { "@types/unist": "*" } }, "node_modules/@types/mime": { - "version": "1.3.2", - "license": "MIT" + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-3.0.1.tgz", + "integrity": 
"sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==" }, "node_modules/@types/node": { - "version": "17.0.31", - "license": "MIT" + "version": "18.11.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", + "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==" }, "node_modules/@types/parse-json": { "version": "4.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" }, "node_modules/@types/parse5": { "version": "5.0.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-5.0.3.tgz", + "integrity": "sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw==" }, "node_modules/@types/prettier": { - "version": "2.6.0", - "dev": true, - "license": "MIT" + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.1.tgz", + "integrity": "sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow==", + "dev": true }, "node_modules/@types/prop-types": { "version": "15.7.5", - "license": "MIT" - }, - "node_modules/@types/q": { - "version": "1.5.5", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", + "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" }, "node_modules/@types/qs": { "version": "6.9.7", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" }, "node_modules/@types/range-parser": { "version": "1.2.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", + "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" }, "node_modules/@types/react": { - "version": "17.0.44", - "license": "MIT", + "version": "18.0.21", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.0.21.tgz", + "integrity": "sha512-7QUCOxvFgnD5Jk8ZKlUAhVcRj7GuJRjnjjiY/IUBWKgOlnvDvTMLD4RTF7NPyVmbRhNrbomZiOepg7M/2Kj1mA==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -4942,16 +5497,29 @@ } }, "node_modules/@types/react-dom": { - "version": "17.0.16", + "version": "17.0.17", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.17.tgz", + "integrity": "sha512-VjnqEmqGnasQKV0CWLevqMTXBYG9GbwuE6x3VetERLh0cq2LTptFE73MrQi2S7GkKXCf2GgwItB/melLnxfnsg==", "dev": true, - "license": "MIT", "dependencies": { "@types/react": "^17" } }, + "node_modules/@types/react-dom/node_modules/@types/react": { + "version": "17.0.50", + "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.50.tgz", + "integrity": "sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==", + "dev": true, + "dependencies": { + "@types/prop-types": "*", + "@types/scheduler": "*", + "csstype": "^3.0.2" + } + }, "node_modules/@types/react-router": { - "version": "5.1.18", - "license": "MIT", + "version": "5.1.19", + "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.19.tgz", + "integrity": 
"sha512-Fv/5kb2STAEMT3wHzdKQK2z8xKq38EDIGVrutYLmQVVLe+4orDFquU52hQrULnEHinMKv9FSA6lf9+uNT1ITtA==", "dependencies": { "@types/history": "^4.7.11", "@types/react": "*" @@ -4959,7 +5527,8 @@ }, "node_modules/@types/react-router-config": { "version": "5.0.6", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/react-router-config/-/react-router-config-5.0.6.tgz", + "integrity": "sha512-db1mx37a1EJDf1XeX8jJN7R3PZABmJQXR8r28yUjVMFSjkmnQo6X6pOEEmNl+Tp2gYQOGPdYbFIipBtdElZ3Yg==", "dependencies": { "@types/history": "^4.7.11", "@types/react": "*", @@ -4968,7 +5537,8 @@ }, "node_modules/@types/react-router-dom": { "version": "5.3.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.3.3.tgz", + "integrity": "sha512-kpqnYK4wcdm5UaWI3fLcELopqLrHgLqNsdpHauzlQktfkHL3npOSwtj1Uz9oKBAzs7lFtVkV8j83voAz2D8fhw==", "dependencies": { "@types/history": "^4.7.11", "@types/react": "*", @@ -4977,81 +5547,116 @@ }, "node_modules/@types/retry": { "version": "0.12.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==" }, "node_modules/@types/sax": { "version": "1.2.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-pSAff4IAxJjfAXUG6tFkO7dsSbTmf8CtUpfhhZ5VhkRpC4628tJhh3+V6H1E+/Gs9piSzYKT5yzHO5M4GG9jkw==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/scheduler": { "version": "0.16.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", + "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==" }, "node_modules/@types/serve-index": { "version": "1.9.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz", + "integrity": "sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg==", "dependencies": { "@types/express": "*" } }, "node_modules/@types/serve-static": { - "version": "1.13.10", - "license": "MIT", + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz", + "integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==", "dependencies": { - "@types/mime": "^1", + "@types/mime": "*", "@types/node": "*" } }, + "node_modules/@types/sinonjs__fake-timers": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", + "integrity": "sha512-0kSuKjAS0TrGLJ0M/+8MaFkGsQhZpB6pxOmvS3K8FYI72K//YmdfoW9X2qPsAKh1mkwxGD5zib9s1FIFed6E8g==", + "dev": true + }, + "node_modules/@types/sizzle": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.3.tgz", + "integrity": "sha512-JYM8x9EGF163bEyhdJBpR2QX1R5naCJHC8ucJylJ3w9/CVBaskdQ8WqBf8MmQrd1kRvp/a4TS8HJ+bxzR7ZJYQ==", + "dev": true + }, "node_modules/@types/sockjs": { "version": "0.3.33", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.33.tgz", + "integrity": "sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/stack-utils": { "version": "2.0.1", - "dev": true, - "license": "MIT" + "resolved": 
"https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", + "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", + "dev": true }, "node_modules/@types/testing-library__jest-dom": { - "version": "5.14.3", + "version": "5.14.5", + "resolved": "https://registry.npmjs.org/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.14.5.tgz", + "integrity": "sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ==", "dev": true, - "license": "MIT", "dependencies": { "@types/jest": "*" } }, "node_modules/@types/unist": { "version": "2.0.6", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz", + "integrity": "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==" }, "node_modules/@types/ws": { "version": "8.5.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.3.tgz", + "integrity": "sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w==", "dependencies": { "@types/node": "*" } }, "node_modules/@types/yargs": { "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz", + "integrity": "sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==", "dev": true, - "license": "MIT", "dependencies": { "@types/yargs-parser": "*" } }, "node_modules/@types/yargs-parser": { "version": "21.0.0", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.0.tgz", + "integrity": "sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==", + "dev": true + }, + "node_modules/@types/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-Cn6WYCm0tXv8p6k+A8PvbDG763EDpBoTzHdA+Q/MF6H3sapGjCm9NzoaJncJS9tUKSuCoDs9XHxYYsQDgxR6kw==", "dev": true, - "license": "MIT" + "optional": true, + "dependencies": { + "@types/node": "*" + } }, "node_modules/@webassemblyjs/ast": { "version": "1.11.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz", + "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==", "dependencies": { "@webassemblyjs/helper-numbers": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1" @@ -5059,19 +5664,23 @@ }, "node_modules/@webassemblyjs/floating-point-hex-parser": { "version": "1.11.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz", + "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==" }, "node_modules/@webassemblyjs/helper-api-error": { "version": "1.11.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz", + "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==" }, "node_modules/@webassemblyjs/helper-buffer": { "version": "1.11.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz", + "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==" }, "node_modules/@webassemblyjs/helper-numbers": { "version": "1.11.1", - 
"license": "MIT", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz", + "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==", "dependencies": { "@webassemblyjs/floating-point-hex-parser": "1.11.1", "@webassemblyjs/helper-api-error": "1.11.1", @@ -5080,11 +5689,13 @@ }, "node_modules/@webassemblyjs/helper-wasm-bytecode": { "version": "1.11.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz", + "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==" }, "node_modules/@webassemblyjs/helper-wasm-section": { "version": "1.11.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz", + "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", @@ -5094,25 +5705,29 @@ }, "node_modules/@webassemblyjs/ieee754": { "version": "1.11.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz", + "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==", "dependencies": { "@xtuc/ieee754": "^1.2.0" } }, "node_modules/@webassemblyjs/leb128": { "version": "1.11.1", - "license": "Apache-2.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz", + "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==", "dependencies": { "@xtuc/long": "4.2.2" } }, "node_modules/@webassemblyjs/utf8": { "version": "1.11.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz", + "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==" }, "node_modules/@webassemblyjs/wasm-edit": { "version": "1.11.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz", + "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", @@ -5126,7 +5741,8 @@ }, "node_modules/@webassemblyjs/wasm-gen": { "version": "1.11.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz", + "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1", @@ -5137,7 +5753,8 @@ }, "node_modules/@webassemblyjs/wasm-opt": { "version": "1.11.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz", + "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", @@ -5147,7 +5764,8 @@ }, "node_modules/@webassemblyjs/wasm-parser": { "version": "1.11.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz", + "integrity": 
"sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-api-error": "1.11.1", @@ -5159,7 +5777,8 @@ }, "node_modules/@webassemblyjs/wast-printer": { "version": "1.11.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz", + "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==", "dependencies": { "@webassemblyjs/ast": "1.11.1", "@xtuc/long": "4.2.2" @@ -5167,20 +5786,24 @@ }, "node_modules/@xtuc/ieee754": { "version": "1.2.0", - "license": "BSD-3-Clause" + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==" }, "node_modules/@xtuc/long": { "version": "4.2.2", - "license": "Apache-2.0" + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==" }, "node_modules/abab": { "version": "2.0.6", - "dev": true, - "license": "BSD-3-Clause" + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", + "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", + "dev": true }, "node_modules/accepts": { "version": "1.3.8", - "license": "MIT", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" @@ -5190,8 +5813,9 @@ } }, "node_modules/acorn": { - "version": "8.7.1", - "license": "MIT", + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.0.tgz", + "integrity": "sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w==", "bin": { "acorn": "bin/acorn" }, @@ -5201,8 +5825,9 @@ }, "node_modules/acorn-globals": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-6.0.0.tgz", + "integrity": "sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==", "dev": true, - "license": "MIT", "dependencies": { "acorn": "^7.1.1", "acorn-walk": "^7.1.1" @@ -5210,8 +5835,9 @@ }, "node_modules/acorn-globals/node_modules/acorn": { "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", "dev": true, - "license": "MIT", "bin": { "acorn": "bin/acorn" }, @@ -5221,30 +5847,34 @@ }, "node_modules/acorn-import-assertions": { "version": "1.8.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz", + "integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==", "peerDependencies": { "acorn": "^8" } }, "node_modules/acorn-walk": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", + "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", "dev": true, - "license": "MIT", "engines": { "node": ">=0.4.0" } }, "node_modules/address": { - "version": "1.2.0", - "license": "MIT", + "version": 
"1.2.1", + "resolved": "https://registry.npmjs.org/address/-/address-1.2.1.tgz", + "integrity": "sha512-B+6bi5D34+fDYENiH5qOlA0cV2rAGKuWZ9LeyUUehbXy8e0VS9e498yO0Jeeh+iM+6KbfudHTFjXw2MmJD4QRA==", "engines": { "node": ">= 10.0.0" } }, "node_modules/agent-base": { "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", "dev": true, - "license": "MIT", "dependencies": { "debug": "4" }, @@ -5254,7 +5884,8 @@ }, "node_modules/aggregate-error": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" @@ -5265,7 +5896,8 @@ }, "node_modules/ajv": { "version": "6.12.6", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -5279,7 +5911,8 @@ }, "node_modules/ajv-formats": { "version": "2.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", "dependencies": { "ajv": "^8.0.0" }, @@ -5294,7 +5927,8 @@ }, "node_modules/ajv-formats/node_modules/ajv": { "version": "8.11.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -5308,59 +5942,66 @@ }, "node_modules/ajv-keywords": { "version": "3.5.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", "peerDependencies": { "ajv": "^6.9.1" } }, "node_modules/ajv/node_modules/json-schema-traverse": { "version": "0.4.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, "node_modules/algoliasearch": { - "version": "4.13.0", - "license": "MIT", - "dependencies": { - "@algolia/cache-browser-local-storage": "4.13.0", - "@algolia/cache-common": "4.13.0", - "@algolia/cache-in-memory": "4.13.0", - "@algolia/client-account": "4.13.0", - "@algolia/client-analytics": "4.13.0", - "@algolia/client-common": "4.13.0", - "@algolia/client-personalization": "4.13.0", - "@algolia/client-search": "4.13.0", - "@algolia/logger-common": "4.13.0", - "@algolia/logger-console": "4.13.0", - "@algolia/requester-browser-xhr": "4.13.0", - "@algolia/requester-common": "4.13.0", - "@algolia/requester-node-http": "4.13.0", - "@algolia/transporter": "4.13.0" + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.14.2.tgz", + "integrity": "sha512-ngbEQonGEmf8dyEh5f+uOIihv4176dgbuOZspiuhmTTBRBuzWu3KCGHre6uHj5YyuC7pNvQGzB6ZNJyZi0z+Sg==", + "dependencies": { + 
"@algolia/cache-browser-local-storage": "4.14.2", + "@algolia/cache-common": "4.14.2", + "@algolia/cache-in-memory": "4.14.2", + "@algolia/client-account": "4.14.2", + "@algolia/client-analytics": "4.14.2", + "@algolia/client-common": "4.14.2", + "@algolia/client-personalization": "4.14.2", + "@algolia/client-search": "4.14.2", + "@algolia/logger-common": "4.14.2", + "@algolia/logger-console": "4.14.2", + "@algolia/requester-browser-xhr": "4.14.2", + "@algolia/requester-common": "4.14.2", + "@algolia/requester-node-http": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "node_modules/algoliasearch-helper": { - "version": "3.8.2", - "license": "MIT", + "version": "3.11.1", + "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.11.1.tgz", + "integrity": "sha512-mvsPN3eK4E0bZG0/WlWJjeqe/bUD2KOEVOl0GyL/TGXn6wcpZU8NOuztGHCUKXkyg5gq6YzUakVTmnmSSO5Yiw==", "dependencies": { "@algolia/events": "^4.0.1" }, "peerDependencies": { - "algoliasearch": ">= 3.1 < 5" + "algoliasearch": ">= 3.1 < 6" } }, "node_modules/ansi-align": { "version": "3.0.1", - "license": "ISC", + "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", + "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", "dependencies": { "string-width": "^4.1.0" } }, "node_modules/ansi-align/node_modules/emoji-regex": { "version": "8.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/ansi-align/node_modules/string-width": { "version": "4.2.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -5370,10 +6011,20 @@ "node": ">=8" } }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/ansi-escapes": { "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", "dev": true, - "license": "MIT", "dependencies": { "type-fest": "^0.21.3" }, @@ -5386,8 +6037,9 @@ }, "node_modules/ansi-escapes/node_modules/type-fest": { "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", "dev": true, - "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=10" }, @@ -5397,24 +6049,27 @@ }, "node_modules/ansi-html-community": { "version": "0.0.8", + "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", + "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==", "engines": [ "node >= 0.8.0" ], - "license": "Apache-2.0", "bin": { "ansi-html": "bin/ansi-html" } }, "node_modules/ansi-regex": { "version": "5.0.1", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "engines": { "node": ">=8" } }, "node_modules/ansi-styles": { "version": "3.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "dependencies": { "color-convert": "^1.9.0" }, @@ -5424,11 +6079,13 @@ }, "node_modules/any-base": { "version": "1.1.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/any-base/-/any-base-1.1.0.tgz", + "integrity": "sha512-uMgjozySS8adZZYePpaWs8cxB9/kdzmpX6SgJZ+wbz1K5eYk5QMYDVJaZKhxyIHUdnnJkfR7SVgStgH7LkGUyg==" }, "node_modules/anymatch": { "version": "3.1.2", - "license": "ISC", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" @@ -5437,76 +6094,76 @@ "node": ">= 8" } }, - "node_modules/aproba": { - "version": "1.2.0", - "license": "ISC" - }, - "node_modules/are-we-there-yet": { - "version": "1.1.7", - "license": "ISC", - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^2.0.6" - } - }, - "node_modules/are-we-there-yet/node_modules/isarray": { - "version": "1.0.0", - "license": "MIT" - }, - "node_modules/are-we-there-yet/node_modules/readable-stream": { - "version": "2.3.7", - "license": "MIT", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/are-we-there-yet/node_modules/string_decoder": { - "version": "1.1.1", - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.1.0" - } + "node_modules/arch": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", + "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] }, "node_modules/arg": { - "version": "5.0.1", - "license": "MIT" + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==" }, "node_modules/argparse": { "version": "2.0.1", - "license": "Python-2.0" + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "node_modules/aria-query": { - "version": "5.0.0", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.0.2.tgz", + "integrity": "sha512-eigU3vhqSO+Z8BKDnVLN/ompjhf3pYzecKXz8+whRy+9gZu8n1TCGfwzQUUPnqdHl9ax1Hr9031orZ+UOEYr7Q==", "dev": true, - "license": "Apache-2.0", "engines": { "node": ">=6.0" } }, "node_modules/array-flatten": { "version": "2.1.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz", + "integrity": 
"sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==" }, "node_modules/array-union": { "version": "2.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", "engines": { "node": ">=8" } }, "node_modules/asap": { "version": "2.0.6", - "license": "MIT" + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "node_modules/asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "dev": true, + "dependencies": { + "safer-buffer": "~2.1.0" + } }, "node_modules/asn1.js": { "version": "5.4.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz", + "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==", "dependencies": { "bn.js": "^4.0.0", "inherits": "^2.0.1", @@ -5516,11 +6173,13 @@ }, "node_modules/asn1.js/node_modules/bn.js": { "version": "4.12.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, "node_modules/assert": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/assert/-/assert-2.0.0.tgz", + "integrity": "sha512-se5Cd+js9dXJnu6Ag2JFc00t+HmHOen+8Q+L7O9zI0PqQXr20uk2J0XQqMxZEeo5U50o8Nvmmx7dZrl+Ufr35A==", "dependencies": { "es6-object-assign": "^1.1.0", "is-nan": "^1.2.1", @@ -5528,38 +6187,47 @@ "util": "^0.12.0" } }, - "node_modules/async": { - "version": "2.6.4", - "license": "MIT", - "dependencies": { - "lodash": "^4.17.14" + "node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", + "dev": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true, + "engines": { + "node": ">=8" } }, + "node_modules/async": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", + "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==", + "dev": true + }, "node_modules/asynckit": { "version": "0.4.0", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/at-least-node": { "version": "1.0.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", + "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", "engines": { "node": ">= 4.0.0" } }, - "node_modules/atob": { - "version": "2.1.2", - "dev": true, - "license": "(MIT OR Apache-2.0)", 
- "bin": { - "atob": "bin/atob.js" - }, - "engines": { - "node": ">= 4.5.0" - } - }, "node_modules/autoprefixer": { - "version": "10.4.7", + "version": "10.4.12", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.12.tgz", + "integrity": "sha512-WrCGV9/b97Pa+jtwf5UGaRjgQIg7OK3D06GnoYoZNcG1Xb8Gt3EfuKjlhh9i/VtT16g6PYjZ69jdJ2g8FxSC4Q==", "funding": [ { "type": "opencollective", @@ -5570,10 +6238,9 @@ "url": "https://tidelift.com/funding/github/npm/autoprefixer" } ], - "license": "MIT", "dependencies": { - "browserslist": "^4.20.3", - "caniuse-lite": "^1.0.30001335", + "browserslist": "^4.21.4", + "caniuse-lite": "^1.0.30001407", "fraction.js": "^4.2.0", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", @@ -5591,7 +6258,8 @@ }, "node_modules/available-typed-arrays": { "version": "1.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", + "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", "engines": { "node": ">= 0.4" }, @@ -5599,17 +6267,35 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/axios": { - "version": "0.25.0", - "license": "MIT", - "dependencies": { - "follow-redirects": "^1.14.7" + "node_modules/aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/aws4": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", + "dev": true + }, + "node_modules/axios": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz", + "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==", + "dependencies": { + "follow-redirects": "^1.14.9", + "form-data": "^4.0.0" } }, "node_modules/babel-jest": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-27.5.1.tgz", + "integrity": "sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg==", "dev": true, - "license": "MIT", "dependencies": { "@jest/transform": "^27.5.1", "@jest/types": "^27.5.1", @@ -5629,8 +6315,9 @@ }, "node_modules/babel-jest/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -5643,8 +6330,9 @@ }, "node_modules/babel-jest/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -5658,8 +6346,9 @@ }, "node_modules/babel-jest/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, 
- "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -5669,21 +6358,24 @@ }, "node_modules/babel-jest/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/babel-jest/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/babel-jest/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -5693,7 +6385,8 @@ }, "node_modules/babel-loader": { "version": "8.2.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.5.tgz", + "integrity": "sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ==", "dependencies": { "find-cache-dir": "^3.3.1", "loader-utils": "^2.0.0", @@ -5710,7 +6403,8 @@ }, "node_modules/babel-plugin-apply-mdx-type-prop": { "version": "1.6.22", - "license": "MIT", + "resolved": "https://registry.npmjs.org/babel-plugin-apply-mdx-type-prop/-/babel-plugin-apply-mdx-type-prop-1.6.22.tgz", + "integrity": "sha512-VefL+8o+F/DfK24lPZMtJctrCVOfgbqLAGZSkxwhazQv4VxPg3Za/i40fu22KR2m8eEda+IfSOlPLUSIiLcnCQ==", "dependencies": { "@babel/helper-plugin-utils": "7.10.4", "@mdx-js/util": "1.6.22" @@ -5725,18 +6419,21 @@ }, "node_modules/babel-plugin-apply-mdx-type-prop/node_modules/@babel/helper-plugin-utils": { "version": "7.10.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", + "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" }, "node_modules/babel-plugin-dynamic-import-node": { "version": "2.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz", + "integrity": "sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ==", "dependencies": { "object.assign": "^4.1.0" } }, "node_modules/babel-plugin-extract-import-names": { "version": "1.6.22", - "license": "MIT", + "resolved": "https://registry.npmjs.org/babel-plugin-extract-import-names/-/babel-plugin-extract-import-names-1.6.22.tgz", + "integrity": "sha512-yJ9BsJaISua7d8zNT7oRG1ZLBJCIdZ4PZqmH8qa9N5AK01ifk3fnkc98AXhtzE7UkfCsEumvoQWgoYLhOnJ7jQ==", "dependencies": { "@babel/helper-plugin-utils": "7.10.4" }, @@ -5747,12 +6444,14 @@ }, "node_modules/babel-plugin-extract-import-names/node_modules/@babel/helper-plugin-utils": { "version": "7.10.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", + "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" }, "node_modules/babel-plugin-istanbul": { "version": "6.1.1", + "resolved": 
"https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { "@babel/helper-plugin-utils": "^7.0.0", "@istanbuljs/load-nyc-config": "^1.0.0", @@ -5766,8 +6465,9 @@ }, "node_modules/babel-plugin-jest-hoist": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz", + "integrity": "sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ==", "dev": true, - "license": "MIT", "dependencies": { "@babel/template": "^7.3.3", "@babel/types": "^7.3.3", @@ -5779,11 +6479,12 @@ } }, "node_modules/babel-plugin-polyfill-corejs2": { - "version": "0.3.1", - "license": "MIT", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz", + "integrity": "sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q==", "dependencies": { - "@babel/compat-data": "^7.13.11", - "@babel/helper-define-polyfill-provider": "^0.3.1", + "@babel/compat-data": "^7.17.7", + "@babel/helper-define-polyfill-provider": "^0.3.3", "semver": "^6.1.1" }, "peerDependencies": { @@ -5792,27 +6493,30 @@ }, "node_modules/babel-plugin-polyfill-corejs2/node_modules/semver": { "version": "6.3.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/babel-plugin-polyfill-corejs3": { - "version": "0.5.2", - "license": "MIT", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz", + "integrity": "sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA==", "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.3.1", - "core-js-compat": "^3.21.0" + "@babel/helper-define-polyfill-provider": "^0.3.3", + "core-js-compat": "^3.25.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "node_modules/babel-plugin-polyfill-regenerator": { - "version": "0.3.1", - "license": "MIT", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz", + "integrity": "sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw==", "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.3.1" + "@babel/helper-define-polyfill-provider": "^0.3.3" }, "peerDependencies": { "@babel/core": "^7.0.0-0" @@ -5820,7 +6524,8 @@ }, "node_modules/babel-plugin-styled-components": { "version": "2.0.7", - "license": "MIT", + "resolved": "https://registry.npmjs.org/babel-plugin-styled-components/-/babel-plugin-styled-components-2.0.7.tgz", + "integrity": "sha512-i7YhvPgVqRKfoQ66toiZ06jPNA3p6ierpfUuEWxNF+fV27Uv5gxBkf8KZLHUCc1nFA9j6+80pYoIpqCeyW3/bA==", "dependencies": { "@babel/helper-annotate-as-pure": "^7.16.0", "@babel/helper-module-imports": "^7.16.0", @@ -5834,12 +6539,14 @@ }, "node_modules/babel-plugin-syntax-jsx": { "version": "6.18.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz", + "integrity": 
"sha512-qrPaCSo9c8RHNRHIotaufGbuOBN8rtdC4QrrFFc43vyWCCz7Kl7GL1PGaXtMGQZUXrkCjNEgxDfmAuAabr/rlw==" }, "node_modules/babel-preset-current-node-syntax": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz", + "integrity": "sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==", "dev": true, - "license": "MIT", "dependencies": { "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-bigint": "^7.8.3", @@ -5860,8 +6567,9 @@ }, "node_modules/babel-preset-jest": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz", + "integrity": "sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag==", "dev": true, - "license": "MIT", "dependencies": { "babel-plugin-jest-hoist": "^27.5.1", "babel-preset-current-node-syntax": "^1.0.0" @@ -5875,7 +6583,8 @@ }, "node_modules/bail": { "version": "1.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz", + "integrity": "sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -5883,14 +6592,18 @@ }, "node_modules/balanced-match": { "version": "1.0.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "node_modules/base16": { "version": "1.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz", + "integrity": "sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==" }, "node_modules/base64-js": { "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", "funding": [ { "type": "github", @@ -5904,73 +6617,74 @@ "type": "consulting", "url": "https://feross.org/support" } - ], - "license": "MIT" + ] }, "node_modules/batch": { "version": "0.6.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", + "integrity": "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==" + }, + "node_modules/bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "dev": true, + "dependencies": { + "tweetnacl": "^0.14.3" + } }, "node_modules/big.js": { "version": "5.2.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", "engines": { "node": "*" } }, "node_modules/binary-extensions": { "version": "2.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", "engines": { "node": ">=8" } }, "node_modules/bl": { "version": "4.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": 
"sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, - "node_modules/bl/node_modules/buffer": { - "version": "5.7.1", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } + "node_modules/blob-util": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", + "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==", + "dev": true }, "node_modules/bluebird": { "version": "3.7.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true }, "node_modules/bmp-js": { "version": "0.1.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/bmp-js/-/bmp-js-0.1.0.tgz", + "integrity": "sha512-vHdS19CnY3hwiNdkaqk93DvjVLfbEcI8mys4UjuWrlX1haDmroo8o4xCzh4wD6DGV6HxRCyauwhHRqMTfERtjw==" }, "node_modules/bn.js": { - "version": "5.2.0", - "license": "MIT" + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", + "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==" }, "node_modules/body-parser": { - "version": "1.20.0", - "license": "MIT", + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", + "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.4", @@ -5980,7 +6694,7 @@ "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.10.3", + "qs": "6.11.0", "raw-body": "2.5.1", "type-is": "~1.6.18", "unpipe": "1.0.0" @@ -5992,39 +6706,59 @@ }, "node_modules/body-parser/node_modules/bytes": { "version": "3.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", "engines": { "node": ">= 0.8" } }, "node_modules/body-parser/node_modules/debug": { "version": "2.6.9", - "license": "MIT", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/body-parser/node_modules/ms": { "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/body-parser/node_modules/qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/bonjour-service": { - "version": "1.0.12", - 
"license": "MIT", + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.0.14.tgz", + "integrity": "sha512-HIMbgLnk1Vqvs6B4Wq5ep7mxvj9sGz5d1JJyDNSGNIdA/w2MCz6GTjWTdjqOJV1bEPj+6IkxDvWNFKEBxNt4kQ==", "dependencies": { "array-flatten": "^2.1.2", "dns-equal": "^1.0.0", "fast-deep-equal": "^3.1.3", - "multicast-dns": "^7.2.4" + "multicast-dns": "^7.2.5" } }, "node_modules/boolbase": { "version": "1.0.0", - "license": "ISC" + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==" }, "node_modules/boxen": { "version": "6.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-6.2.1.tgz", + "integrity": "sha512-H4PEsJXfFI/Pt8sjDWbHlQPx4zL/bvSQjcilJmaulGt5mLDorHOHpmdXAJcBcmru7PhYSp/cDMWRko4ZUMFkSw==", "dependencies": { "ansi-align": "^3.0.1", "camelcase": "^6.2.0", @@ -6044,7 +6778,8 @@ }, "node_modules/boxen/node_modules/ansi-styles": { "version": "4.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dependencies": { "color-convert": "^2.0.1" }, @@ -6057,7 +6792,8 @@ }, "node_modules/boxen/node_modules/chalk": { "version": "4.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -6071,7 +6807,8 @@ }, "node_modules/boxen/node_modules/color-convert": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { "color-name": "~1.1.4" }, @@ -6081,18 +6818,21 @@ }, "node_modules/boxen/node_modules/color-name": { "version": "1.1.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/boxen/node_modules/has-flag": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "engines": { "node": ">=8" } }, "node_modules/boxen/node_modules/supports-color": { "version": "7.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dependencies": { "has-flag": "^4.0.0" }, @@ -6102,7 +6842,8 @@ }, "node_modules/brace-expansion": { "version": "1.1.11", - "license": "MIT", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -6110,7 +6851,8 @@ }, "node_modules/braces": { "version": "3.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": 
"sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", "dependencies": { "fill-range": "^7.0.1" }, @@ -6120,16 +6862,19 @@ }, "node_modules/brorand": { "version": "1.1.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==" }, "node_modules/browser-process-hrtime": { "version": "1.0.0", - "dev": true, - "license": "BSD-2-Clause" + "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", + "dev": true }, "node_modules/browserify-aes": { "version": "1.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", "dependencies": { "buffer-xor": "^1.0.3", "cipher-base": "^1.0.0", @@ -6141,7 +6886,8 @@ }, "node_modules/browserify-cipher": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz", + "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==", "dependencies": { "browserify-aes": "^1.0.4", "browserify-des": "^1.0.0", @@ -6150,7 +6896,8 @@ }, "node_modules/browserify-des": { "version": "1.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz", + "integrity": "sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==", "dependencies": { "cipher-base": "^1.0.1", "des.js": "^1.0.0", @@ -6160,7 +6907,8 @@ }, "node_modules/browserify-rsa": { "version": "4.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz", + "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==", "dependencies": { "bn.js": "^5.0.0", "randombytes": "^2.0.1" @@ -6168,7 +6916,8 @@ }, "node_modules/browserify-sign": { "version": "4.2.1", - "license": "ISC", + "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.1.tgz", + "integrity": "sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==", "dependencies": { "bn.js": "^5.1.1", "browserify-rsa": "^4.0.1", @@ -6181,33 +6930,18 @@ "safe-buffer": "^5.2.0" } }, - "node_modules/browserify-sign/node_modules/safe-buffer": { - "version": "5.2.1", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, "node_modules/browserify-zlib": { "version": "0.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", + "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", "dependencies": { "pako": "~1.0.5" } }, "node_modules/browserslist": { - "version": "4.20.3", + "version": "4.21.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz", + "integrity": "sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==", 
"funding": [ { "type": "opencollective", @@ -6218,13 +6952,11 @@ "url": "https://tidelift.com/funding/github/npm/browserslist" } ], - "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001332", - "electron-to-chromium": "^1.4.118", - "escalade": "^3.1.1", - "node-releases": "^2.0.3", - "picocolors": "^1.0.0" + "caniuse-lite": "^1.0.30001400", + "electron-to-chromium": "^1.4.251", + "node-releases": "^2.0.6", + "update-browserslist-db": "^1.0.9" }, "bin": { "browserslist": "cli.js" @@ -6235,14 +6967,17 @@ }, "node_modules/bser": { "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", "dev": true, - "license": "Apache-2.0", "dependencies": { "node-int64": "^0.4.0" } }, "node_modules/buffer": { - "version": "6.0.3", + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", "funding": [ { "type": "github", @@ -6257,41 +6992,55 @@ "url": "https://feross.org/support" } ], - "license": "MIT", "dependencies": { "base64-js": "^1.3.1", - "ieee754": "^1.2.1" + "ieee754": "^1.1.13" + } + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "dev": true, + "engines": { + "node": "*" } }, "node_modules/buffer-equal": { "version": "0.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/buffer-equal/-/buffer-equal-0.0.1.tgz", + "integrity": "sha512-RgSV6InVQ9ODPdLWJ5UAqBqJBOg370Nz6ZQtRzpt6nUjc8v0St97uJ4PYC6NztqIScrAXafKM3mZPMygSe1ggA==", "engines": { "node": ">=0.4.0" } }, "node_modules/buffer-from": { "version": "1.1.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" }, "node_modules/buffer-xor": { "version": "1.0.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", + "integrity": "sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ==" }, "node_modules/builtin-status-codes": { "version": "3.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", + "integrity": "sha512-HpGFw18DgFWlncDfjTa2rcQ4W88O1mC8e8yZ2AvQY5KDaktSTwo+KRf6nHK6FRI5FyRyb/5T6+TSxfP7QyGsmQ==" }, "node_modules/bytes": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==", "engines": { "node": ">= 0.8" } }, "node_modules/cacheable-request": { "version": "6.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", + "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", "dependencies": { "clone-response": "^1.0.2", "get-stream": "^5.1.0", @@ -6305,36 +7054,35 @@ "node": ">=8" } }, - "node_modules/cacheable-request/node_modules/get-stream": { - "version": "5.2.0", - "license": "MIT", - "dependencies": { - "pump": "^3.0.0" - }, - 
"engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/cacheable-request/node_modules/lowercase-keys": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", "engines": { "node": ">=8" } }, "node_modules/cacheable-request/node_modules/normalize-url": { "version": "4.5.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", + "integrity": "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==", "engines": { "node": ">=8" } }, + "node_modules/cachedir": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", + "integrity": "sha512-A+Fezp4zxnit6FanDmv9EqXNAi3vt9DWp51/71UEhXukb7QUuvtv9344h91dyAxuTLoSYJFU299qzR3tzwPAhw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/call-bind": { "version": "1.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", "dependencies": { "function-bind": "^1.1.1", "get-intrinsic": "^1.0.2" @@ -6345,18 +7093,21 @@ }, "node_modules/call-me-maybe": { "version": "1.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.1.tgz", + "integrity": "sha512-wCyFsDQkKPwwF8BDwOiWNx/9K45L/hvggQiDbve+viMNMQnWhrlYIuBk09offfwCRtCO9P6XwUttufzU11WCVw==" }, "node_modules/callsites": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "engines": { "node": ">=6" } }, "node_modules/camel-case": { "version": "4.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", + "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" @@ -6364,7 +7115,8 @@ }, "node_modules/camelcase": { "version": "6.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", "engines": { "node": ">=10" }, @@ -6374,18 +7126,24 @@ }, "node_modules/camelcase-css": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", "engines": { "node": ">= 6" } }, "node_modules/camelize": { - "version": "1.0.0", - "license": "MIT" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/camelize/-/camelize-1.0.1.tgz", + "integrity": "sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/caniuse-api": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", + "integrity": 
"sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", "dependencies": { "browserslist": "^4.0.0", "caniuse-lite": "^1.0.0", @@ -6394,7 +7152,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001335", + "version": "1.0.30001421", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001421.tgz", + "integrity": "sha512-Sw4eLbgUJAEhjLs1Fa+mk45sidp1wRn5y6GtDpHGBaNJ9OCDJaVh2tIaWWUnGfuXfKf1JCBaIarak3FkVAvEeA==", "funding": [ { "type": "opencollective", @@ -6404,12 +7164,18 @@ "type": "tidelift", "url": "https://tidelift.com/funding/github/npm/caniuse-lite" } - ], - "license": "CC-BY-4.0" + ] + }, + "node_modules/caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==", + "dev": true }, "node_modules/ccount": { "version": "1.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz", + "integrity": "sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -6417,7 +7183,8 @@ }, "node_modules/chalk": { "version": "2.4.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", @@ -6429,15 +7196,17 @@ }, "node_modules/char-regex": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", "dev": true, - "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/character-entities": { "version": "1.2.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", + "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -6445,7 +7214,8 @@ }, "node_modules/character-entities-legacy": { "version": "1.1.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", + "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -6453,23 +7223,34 @@ }, "node_modules/character-reference-invalid": { "version": "1.1.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", + "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/check-more-types": { + "version": "2.24.0", + "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", + "integrity": "sha512-Pj779qHxV2tuapviy1bSZNEL1maXr13bPYpsvSDB68HlYcYuhlDrmGd63i0JHMCLKzc7rUSNIrpdJlhVlNwrxA==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/cheerio": { - "version": "1.0.0-rc.10", - "license": "MIT", + "version": "1.0.0-rc.12", + 
"resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz", + "integrity": "sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==", "dependencies": { - "cheerio-select": "^1.5.0", - "dom-serializer": "^1.3.2", - "domhandler": "^4.2.0", - "htmlparser2": "^6.1.0", - "parse5": "^6.0.1", - "parse5-htmlparser2-tree-adapter": "^6.0.1", - "tslib": "^2.2.0" + "cheerio-select": "^2.1.0", + "dom-serializer": "^2.0.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "htmlparser2": "^8.0.1", + "parse5": "^7.0.0", + "parse5-htmlparser2-tree-adapter": "^7.0.0" }, "engines": { "node": ">= 6" @@ -6479,14 +7260,16 @@ } }, "node_modules/cheerio-select": { - "version": "1.6.0", - "license": "BSD-2-Clause", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz", + "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==", "dependencies": { - "css-select": "^4.3.0", - "css-what": "^6.0.1", - "domelementtype": "^2.2.0", - "domhandler": "^4.3.1", - "domutils": "^2.8.0" + "boolbase": "^1.0.0", + "css-select": "^5.1.0", + "css-what": "^6.1.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1" }, "funding": { "url": "https://github.com/sponsors/fb55" @@ -6494,13 +7277,14 @@ }, "node_modules/chokidar": { "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", "funding": [ { "type": "individual", "url": "https://paulmillr.com/funding/" } ], - "license": "MIT", "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", @@ -6519,23 +7303,27 @@ }, "node_modules/chownr": { "version": "1.1.4", - "license": "ISC" + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" }, "node_modules/chrome-trace-event": { "version": "1.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", + "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==", "engines": { "node": ">=6.0" } }, "node_modules/ci-info": { - "version": "3.3.0", - "dev": true, - "license": "MIT" + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.5.0.tgz", + "integrity": "sha512-yH4RezKOGlOhxkmhbeNuC4eYZKAUsEaGtBuBzDDP1eFUKiccDWzBABxBfOx31IDwDIXMTxWuwAxUGModvkbuVw==", + "dev": true }, "node_modules/cipher-base": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", + "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", "dependencies": { "inherits": "^2.0.1", "safe-buffer": "^5.0.1" @@ -6543,16 +7331,19 @@ }, "node_modules/cjs-module-lexer": { "version": "1.2.2", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz", + "integrity": "sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==", + "dev": true }, "node_modules/classnames": { - "version": "2.3.1", - "license": "MIT" + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", + "integrity": 
"sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==" }, "node_modules/clean-css": { - "version": "5.3.0", - "license": "MIT", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-5.3.1.tgz", + "integrity": "sha512-lCr8OHhiWCTw4v8POJovCoh4T7I9U11yVsPjMWWnnMmp9ZowCxyad1Pathle/9HjaDp+fdQKjO9fQydE6RHTZg==", "dependencies": { "source-map": "~0.6.0" }, @@ -6562,14 +7353,16 @@ }, "node_modules/clean-stack": { "version": "2.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", "engines": { "node": ">=6" } }, "node_modules/cli-boxes": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", + "integrity": "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==", "engines": { "node": ">=10" }, @@ -6577,9 +7370,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/cli-table3": { - "version": "0.6.2", - "license": "MIT", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.3.tgz", + "integrity": "sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==", "dependencies": { "string-width": "^4.2.0" }, @@ -6592,11 +7398,49 @@ }, "node_modules/cli-table3/node_modules/emoji-regex": { "version": "8.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/cli-table3/node_modules/string-width": { "version": "4.2.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-truncate": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", + "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", + "dev": true, + "dependencies": { + "slice-ansi": "^3.0.0", + "string-width": "^4.2.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/cli-truncate/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": 
"sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -6608,7 +7452,9 @@ }, "node_modules/cliui": { "version": "7.0.4", - "license": "ISC", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", @@ -6617,7 +7463,9 @@ }, "node_modules/cliui/node_modules/ansi-styles": { "version": "4.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, "dependencies": { "color-convert": "^2.0.1" }, @@ -6630,7 +7478,9 @@ }, "node_modules/cliui/node_modules/color-convert": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "dependencies": { "color-name": "~1.1.4" }, @@ -6640,15 +7490,21 @@ }, "node_modules/cliui/node_modules/color-name": { "version": "1.1.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/cliui/node_modules/emoji-regex": { "version": "8.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "node_modules/cliui/node_modules/string-width": { "version": "4.2.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -6660,7 +7516,9 @@ }, "node_modules/cliui/node_modules/wrap-ansi": { "version": "7.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -6675,7 +7533,8 @@ }, "node_modules/clone-deep": { "version": "4.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", "dependencies": { "is-plain-object": "^2.0.4", "kind-of": "^6.0.2", @@ -6686,57 +7545,46 @@ } }, "node_modules/clone-response": { - "version": "1.0.2", - "license": "MIT", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz", + "integrity": "sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==", "dependencies": { "mimic-response": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/clone-response/node_modules/mimic-response": { "version": 
"1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", "engines": { "node": ">=4" } }, "node_modules/clsx": { - "version": "1.1.1", - "license": "MIT", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", + "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", "engines": { "node": ">=6" } }, "node_modules/co": { "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", "dev": true, - "license": "MIT", "engines": { "iojs": ">= 1.0.0", "node": ">= 0.12.0" } }, - "node_modules/coa": { - "version": "2.0.2", - "license": "MIT", - "dependencies": { - "@types/q": "^1.5.1", - "chalk": "^2.4.1", - "q": "^1.1.2" - }, - "engines": { - "node": ">= 4.0" - } - }, - "node_modules/code-point-at": { - "version": "1.1.0", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/collapse-white-space": { "version": "1.0.6", - "license": "MIT", + "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.6.tgz", + "integrity": "sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -6744,12 +7592,14 @@ }, "node_modules/collect-v8-coverage": { "version": "1.0.1", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz", + "integrity": "sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==", + "dev": true }, "node_modules/color": { "version": "3.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", + "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", "dependencies": { "color-convert": "^1.9.3", "color-string": "^1.6.0" @@ -6757,42 +7607,48 @@ }, "node_modules/color-convert": { "version": "1.9.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "dependencies": { "color-name": "1.1.3" } }, "node_modules/color-name": { "version": "1.1.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, "node_modules/color-string": { "version": "1.9.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "node_modules/colord": { - "version": "2.9.2", - "license": "MIT" + "version": "2.9.3", + "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz", + "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==" }, "node_modules/colorette": { - "version": "1.4.0", - "license": "MIT" + "version": "2.0.19", + "resolved": 
"https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz", + "integrity": "sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==" }, "node_modules/combine-promises": { "version": "1.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/combine-promises/-/combine-promises-1.1.0.tgz", + "integrity": "sha512-ZI9jvcLDxqwaXEixOhArm3r7ReIivsXkpbyEWyeOhzz1QS0iSgBPnWvEqvIQtYyamGCYA88gFhmUrs9hrrQ0pg==", "engines": { "node": ">=10" } }, "node_modules/combined-stream": { "version": "1.0.8", - "dev": true, - "license": "MIT", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", "dependencies": { "delayed-stream": "~1.0.0" }, @@ -6802,7 +7658,8 @@ }, "node_modules/comma-separated-tokens": { "version": "1.0.8", - "license": "MIT", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", + "integrity": "sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -6810,18 +7667,30 @@ }, "node_modules/commander": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", + "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", "engines": { "node": ">= 6" } }, + "node_modules/common-tags": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", + "integrity": "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, "node_modules/commondir": { "version": "1.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==" }, "node_modules/compressible": { "version": "2.0.18", - "license": "MIT", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", "dependencies": { "mime-db": ">= 1.43.0 < 2" }, @@ -6831,7 +7700,8 @@ }, "node_modules/compression": { "version": "1.7.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", + "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", "dependencies": { "accepts": "~1.3.5", "bytes": "3.0.0", @@ -6847,22 +7717,31 @@ }, "node_modules/compression/node_modules/debug": { "version": "2.6.9", - "license": "MIT", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/compression/node_modules/ms": { "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/compression/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": 
"sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "node_modules/concat-map": { "version": "0.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "node_modules/configstore": { "version": "5.0.1", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz", + "integrity": "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==", "dependencies": { "dot-prop": "^5.2.0", "graceful-fs": "^4.1.2", @@ -6876,66 +7755,71 @@ } }, "node_modules/connect-history-api-fallback": { - "version": "1.6.0", - "license": "MIT", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz", + "integrity": "sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==", "engines": { "node": ">=0.8" } }, "node_modules/consola": { "version": "2.15.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz", + "integrity": "sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==" }, "node_modules/console-browserify": { - "version": "1.2.0" - }, - "node_modules/console-control-strings": { - "version": "1.1.0", - "license": "ISC" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz", + "integrity": "sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==" }, "node_modules/consolidated-events": { "version": "2.0.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/consolidated-events/-/consolidated-events-2.0.2.tgz", + "integrity": "sha512-2/uRVMdRypf5z/TW/ncD/66l75P5hH2vM/GR8Jf8HLc2xnfJtmina6F6du8+v4Z2vTrMo7jC+W1tmEEuuELgkQ==" }, "node_modules/constants-browserify": { "version": "1.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", + "integrity": "sha512-xFxOwqIzR/e1k1gLiWEophSCMqXcwVHIH7akf7b/vxcUeGunlj3hvZaaqxwHsTgn+IndtkQJgSztIDWeumWJDQ==" }, "node_modules/content-disposition": { "version": "0.5.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", + "integrity": "sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA==", "engines": { "node": ">= 0.6" } }, "node_modules/content-type": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==", "engines": { "node": ">= 0.6" } }, "node_modules/convert-source-map": { - "version": "1.8.0", - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.1.1" - } + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==" }, "node_modules/cookie": { "version": "0.5.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", + "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", 
"engines": { "node": ">= 0.6" } }, "node_modules/cookie-signature": { "version": "1.0.6", - "license": "MIT" + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, "node_modules/copy-text-to-clipboard": { "version": "3.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/copy-text-to-clipboard/-/copy-text-to-clipboard-3.0.1.tgz", + "integrity": "sha512-rvVsHrpFcL4F2P8ihsoLdFHmd404+CMg71S756oRSeQgqk51U3kicGdnvfkrxva0xXH92SjGS62B0XIJsbh+9Q==", "engines": { "node": ">=12" }, @@ -6945,7 +7829,8 @@ }, "node_modules/copy-webpack-plugin": { "version": "10.2.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-10.2.4.tgz", + "integrity": "sha512-xFVltahqlsRcyyJqQbDY6EYTtyQZF9rf+JPjwHObLdPFMEISqkFkr7mFoVOC6BfYS/dNThyoQKvziugm+OnwBg==", "dependencies": { "fast-glob": "^3.2.7", "glob-parent": "^6.0.1", @@ -6967,7 +7852,8 @@ }, "node_modules/copy-webpack-plugin/node_modules/ajv": { "version": "8.11.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -6981,7 +7867,8 @@ }, "node_modules/copy-webpack-plugin/node_modules/ajv-keywords": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "dependencies": { "fast-deep-equal": "^3.1.3" }, @@ -6991,7 +7878,8 @@ }, "node_modules/copy-webpack-plugin/node_modules/array-union": { "version": "3.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-3.0.1.tgz", + "integrity": "sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw==", "engines": { "node": ">=12" }, @@ -7001,7 +7889,8 @@ }, "node_modules/copy-webpack-plugin/node_modules/glob-parent": { "version": "6.0.2", - "license": "ISC", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dependencies": { "is-glob": "^4.0.3" }, @@ -7011,7 +7900,8 @@ }, "node_modules/copy-webpack-plugin/node_modules/globby": { "version": "12.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/globby/-/globby-12.2.0.tgz", + "integrity": "sha512-wiSuFQLZ+urS9x2gGPl1H5drc5twabmm4m2gTR27XDFyjUHJUNsS8o/2aKyIF6IoBaR630atdher0XJ5g6OMmA==", "dependencies": { "array-union": "^3.0.1", "dir-glob": "^3.0.1", @@ -7029,7 +7919,8 @@ }, "node_modules/copy-webpack-plugin/node_modules/schema-utils": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", @@ -7046,7 +7937,8 @@ }, "node_modules/copy-webpack-plugin/node_modules/slash": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": 
"sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", "engines": { "node": ">=12" }, @@ -7055,49 +7947,46 @@ } }, "node_modules/core-js": { - "version": "3.22.4", + "version": "3.25.5", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.25.5.tgz", + "integrity": "sha512-nbm6eZSjm+ZuBQxCUPQKQCoUEfFOXjUZ8dTTyikyKaWrTYmAVbykQfwsKE5dBK88u3QCkCrzsx/PPlKfhsvgpw==", "hasInstallScript": true, - "license": "MIT", "funding": { "type": "opencollective", "url": "https://opencollective.com/core-js" } }, "node_modules/core-js-compat": { - "version": "3.22.4", - "license": "MIT", + "version": "3.25.5", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.25.5.tgz", + "integrity": "sha512-ovcyhs2DEBUIE0MGEKHP4olCUW/XYte3Vroyxuh38rD1wAO4dHohsovUC4eAOuzFxE6b+RXvBU3UZ9o0YhUTkA==", "dependencies": { - "browserslist": "^4.20.3", - "semver": "7.0.0" + "browserslist": "^4.21.4" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/core-js" } }, - "node_modules/core-js-compat/node_modules/semver": { - "version": "7.0.0", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, "node_modules/core-js-pure": { - "version": "3.22.4", + "version": "3.25.5", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.25.5.tgz", + "integrity": "sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg==", "hasInstallScript": true, - "license": "MIT", "funding": { "type": "opencollective", "url": "https://opencollective.com/core-js" } }, "node_modules/core-util-is": { - "version": "1.0.3", - "license": "MIT" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" }, "node_modules/cosmiconfig": { "version": "7.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz", + "integrity": "sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==", "dependencies": { "@types/parse-json": "^4.0.0", "import-fresh": "^3.2.1", @@ -7111,7 +8000,8 @@ }, "node_modules/create-ecdh": { "version": "4.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz", + "integrity": "sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==", "dependencies": { "bn.js": "^4.1.0", "elliptic": "^6.5.3" @@ -7119,11 +8009,13 @@ }, "node_modules/create-ecdh/node_modules/bn.js": { "version": "4.12.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, "node_modules/create-hash": { "version": "1.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", "dependencies": { "cipher-base": "^1.0.1", "inherits": "^2.0.1", @@ -7134,7 +8026,8 @@ }, "node_modules/create-hmac": { "version": "1.1.7", - "license": "MIT", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", "dependencies": { "cipher-base": "^1.0.3", "create-hash": 
"^1.1.0", @@ -7146,14 +8039,16 @@ }, "node_modules/cross-fetch": { "version": "3.1.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.5.tgz", + "integrity": "sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw==", "dependencies": { "node-fetch": "2.6.7" } }, "node_modules/cross-spawn": { "version": "7.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -7165,7 +8060,8 @@ }, "node_modules/crypto-browserify": { "version": "3.12.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz", + "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==", "dependencies": { "browserify-cipher": "^1.0.0", "browserify-sign": "^4.0.0", @@ -7185,31 +8081,24 @@ }, "node_modules/crypto-random-string": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==", "engines": { "node": ">=8" } }, - "node_modules/css": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "inherits": "^2.0.4", - "source-map": "^0.6.1", - "source-map-resolve": "^0.6.0" - } - }, "node_modules/css-color-keywords": { "version": "1.0.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/css-color-keywords/-/css-color-keywords-1.0.0.tgz", + "integrity": "sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg==", "engines": { "node": ">=4" } }, "node_modules/css-declaration-sorter": { - "version": "6.2.2", - "license": "ISC", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.3.1.tgz", + "integrity": "sha512-fBffmak0bPAnyqc/HO8C3n2sHrp9wcqQz6ES9koRF2/mLOVAx9zIQ3Y7R29sYCteTPqMCwns4WYQoCX91Xl3+w==", "engines": { "node": "^10 || ^12 || >=14" }, @@ -7219,8 +8108,9 @@ }, "node_modules/css-loader": { "version": "3.6.0", + "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-3.6.0.tgz", + "integrity": "sha512-M5lSukoWi1If8dhQAUCvj4H8vUt3vOnwbQBH9DdTm/s4Ym2B/3dPMtYZeJmq7Q3S3Pa+I94DcZ7pc9bP14cWIQ==", "dev": true, - "license": "MIT", "dependencies": { "camelcase": "^5.3.1", "cssesc": "^3.0.0", @@ -7249,16 +8139,18 @@ }, "node_modules/css-loader/node_modules/camelcase": { "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", "dev": true, - "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/css-loader/node_modules/json5": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", "dev": true, - "license": "MIT", "dependencies": { "minimist": "^1.2.0" }, @@ -7268,8 +8160,9 @@ }, "node_modules/css-loader/node_modules/loader-utils": { "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.0.tgz", + "integrity": 
"sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==", "dev": true, - "license": "MIT", "dependencies": { "big.js": "^5.2.2", "emojis-list": "^3.0.0", @@ -7281,13 +8174,15 @@ }, "node_modules/css-loader/node_modules/picocolors": { "version": "0.2.1", - "dev": true, - "license": "ISC" + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", + "dev": true }, "node_modules/css-loader/node_modules/postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, - "license": "MIT", "dependencies": { "picocolors": "^0.2.1", "source-map": "^0.6.1" @@ -7302,15 +8197,17 @@ }, "node_modules/css-loader/node_modules/semver": { "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true, - "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/css-minimizer-webpack-plugin": { "version": "3.4.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz", + "integrity": "sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q==", "dependencies": { "cssnano": "^5.0.6", "jest-worker": "^27.0.2", @@ -7346,7 +8243,8 @@ }, "node_modules/css-minimizer-webpack-plugin/node_modules/ajv": { "version": "8.11.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -7360,7 +8258,8 @@ }, "node_modules/css-minimizer-webpack-plugin/node_modules/ajv-keywords": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "dependencies": { "fast-deep-equal": "^3.1.3" }, @@ -7370,7 +8269,8 @@ }, "node_modules/css-minimizer-webpack-plugin/node_modules/schema-utils": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", @@ -7386,26 +8286,24 @@ } }, "node_modules/css-select": { - "version": "4.3.0", - "license": "BSD-2-Clause", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", + "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", "dependencies": { "boolbase": "^1.0.0", - "css-what": "^6.0.1", - "domhandler": "^4.3.1", - "domutils": "^2.8.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", "nth-check": "^2.0.1" }, "funding": { "url": "https://github.com/sponsors/fb55" } }, - "node_modules/css-select-base-adapter": { - "version": "0.1.1", - "license": "MIT" - }, "node_modules/css-to-react-native": { "version": "3.0.0", - 
"license": "MIT", + "resolved": "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-3.0.0.tgz", + "integrity": "sha512-Ro1yETZA813eoyUp2GDBhG2j+YggidUmzO1/v9eYBKR2EHVEniE2MI/NqpTQ954BMpTPZFsGNPm46qFB9dpaPQ==", "dependencies": { "camelize": "^1.0.0", "css-color-keywords": "^1.0.0", @@ -7413,10 +8311,11 @@ } }, "node_modules/css-tree": { - "version": "1.0.0-alpha.37", - "license": "MIT", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz", + "integrity": "sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==", "dependencies": { - "mdn-data": "2.0.4", + "mdn-data": "2.0.14", "source-map": "^0.6.1" }, "engines": { @@ -7425,7 +8324,8 @@ }, "node_modules/css-what": { "version": "6.1.0", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", "engines": { "node": ">= 6" }, @@ -7435,12 +8335,14 @@ }, "node_modules/css.escape": { "version": "1.5.1", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz", + "integrity": "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==", + "dev": true }, "node_modules/cssesc": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", "bin": { "cssesc": "bin/cssesc" }, @@ -7449,10 +8351,11 @@ } }, "node_modules/cssnano": { - "version": "5.1.7", - "license": "MIT", + "version": "5.1.13", + "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-5.1.13.tgz", + "integrity": "sha512-S2SL2ekdEz6w6a2epXn4CmMKU4K3KpcyXLKfAYc9UQQqJRkD/2eLUG0vJ3Db/9OvO5GuAdgXw3pFbR6abqghDQ==", "dependencies": { - "cssnano-preset-default": "^5.2.7", + "cssnano-preset-default": "^5.2.12", "lilconfig": "^2.0.3", "yaml": "^1.10.2" }, @@ -7468,11 +8371,12 @@ } }, "node_modules/cssnano-preset-advanced": { - "version": "5.3.3", - "license": "MIT", + "version": "5.3.8", + "resolved": "https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-5.3.8.tgz", + "integrity": "sha512-xUlLLnEB1LjpEik+zgRNlk8Y/koBPPtONZjp7JKbXigeAmCrFvq9H0pXW5jJV45bQWAlmJ0sKy+IMr0XxLYQZg==", "dependencies": { "autoprefixer": "^10.3.7", - "cssnano-preset-default": "^5.2.7", + "cssnano-preset-default": "^5.2.12", "postcss-discard-unused": "^5.1.0", "postcss-merge-idents": "^5.1.1", "postcss-reduce-idents": "^5.2.0", @@ -7486,34 +8390,35 @@ } }, "node_modules/cssnano-preset-default": { - "version": "5.2.7", - "license": "MIT", + "version": "5.2.12", + "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz", + "integrity": "sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew==", "dependencies": { - "css-declaration-sorter": "^6.2.2", + "css-declaration-sorter": "^6.3.0", "cssnano-utils": "^3.1.0", "postcss-calc": "^8.2.3", "postcss-colormin": "^5.3.0", - "postcss-convert-values": "^5.1.0", - "postcss-discard-comments": "^5.1.1", + "postcss-convert-values": "^5.1.2", + "postcss-discard-comments": "^5.1.2", "postcss-discard-duplicates": "^5.1.0", "postcss-discard-empty": "^5.1.1", "postcss-discard-overridden": "^5.1.0", - "postcss-merge-longhand": "^5.1.4", - "postcss-merge-rules": "^5.1.1", + 
"postcss-merge-longhand": "^5.1.6", + "postcss-merge-rules": "^5.1.2", "postcss-minify-font-values": "^5.1.0", "postcss-minify-gradients": "^5.1.1", - "postcss-minify-params": "^5.1.2", - "postcss-minify-selectors": "^5.2.0", + "postcss-minify-params": "^5.1.3", + "postcss-minify-selectors": "^5.2.1", "postcss-normalize-charset": "^5.1.0", "postcss-normalize-display-values": "^5.1.0", - "postcss-normalize-positions": "^5.1.0", - "postcss-normalize-repeat-style": "^5.1.0", + "postcss-normalize-positions": "^5.1.1", + "postcss-normalize-repeat-style": "^5.1.1", "postcss-normalize-string": "^5.1.0", "postcss-normalize-timing-functions": "^5.1.0", "postcss-normalize-unicode": "^5.1.0", "postcss-normalize-url": "^5.1.0", "postcss-normalize-whitespace": "^5.1.1", - "postcss-ordered-values": "^5.1.1", + "postcss-ordered-values": "^5.1.3", "postcss-reduce-initial": "^5.1.0", "postcss-reduce-transforms": "^5.1.0", "postcss-svgo": "^5.1.0", @@ -7528,7 +8433,8 @@ }, "node_modules/cssnano-utils": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-3.1.0.tgz", + "integrity": "sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA==", "engines": { "node": "^10 || ^12 || >=14.0" }, @@ -7538,7 +8444,8 @@ }, "node_modules/csso": { "version": "4.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", + "integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==", "dependencies": { "css-tree": "^1.1.2" }, @@ -7546,50 +8453,215 @@ "node": ">=8.0.0" } }, - "node_modules/csso/node_modules/css-tree": { - "version": "1.1.3", - "license": "MIT", + "node_modules/cssom": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.4.4.tgz", + "integrity": "sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw==", + "dev": true + }, + "node_modules/cssstyle": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-2.3.0.tgz", + "integrity": "sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==", + "dev": true, "dependencies": { - "mdn-data": "2.0.14", - "source-map": "^0.6.1" + "cssom": "~0.3.6" }, "engines": { - "node": ">=8.0.0" + "node": ">=8" } }, - "node_modules/csso/node_modules/mdn-data": { - "version": "2.0.14", - "license": "CC0-1.0" + "node_modules/cssstyle/node_modules/cssom": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.8.tgz", + "integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==", + "dev": true }, - "node_modules/cssom": { - "version": "0.4.4", + "node_modules/csstype": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.1.tgz", + "integrity": "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==" + }, + "node_modules/cypress": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-10.10.0.tgz", + "integrity": "sha512-bU8r44x1NIYAUNNXt3CwJpLOVth7HUv2hUhYCxZmgZ1IugowDvuHNpevnoZRQx1KKOEisLvIJW+Xen5Pjn41pg==", "dev": true, - "license": "MIT" + "hasInstallScript": true, + "dependencies": { + "@cypress/request": "^2.88.10", + "@cypress/xvfb": "^1.2.4", + "@types/node": "^14.14.31", + "@types/sinonjs__fake-timers": "8.1.1", + "@types/sizzle": "^2.3.2", + "arch": "^2.2.0", + "blob-util": 
"^2.0.2", + "bluebird": "^3.7.2", + "buffer": "^5.6.0", + "cachedir": "^2.3.0", + "chalk": "^4.1.0", + "check-more-types": "^2.24.0", + "cli-cursor": "^3.1.0", + "cli-table3": "~0.6.1", + "commander": "^5.1.0", + "common-tags": "^1.8.0", + "dayjs": "^1.10.4", + "debug": "^4.3.2", + "enquirer": "^2.3.6", + "eventemitter2": "6.4.7", + "execa": "4.1.0", + "executable": "^4.1.1", + "extract-zip": "2.0.1", + "figures": "^3.2.0", + "fs-extra": "^9.1.0", + "getos": "^3.2.1", + "is-ci": "^3.0.0", + "is-installed-globally": "~0.4.0", + "lazy-ass": "^1.6.0", + "listr2": "^3.8.3", + "lodash": "^4.17.21", + "log-symbols": "^4.0.0", + "minimist": "^1.2.6", + "ospath": "^1.2.2", + "pretty-bytes": "^5.6.0", + "proxy-from-env": "1.0.0", + "request-progress": "^3.0.0", + "semver": "^7.3.2", + "supports-color": "^8.1.1", + "tmp": "~0.2.1", + "untildify": "^4.0.0", + "yauzl": "^2.10.0" + }, + "bin": { + "cypress": "bin/cypress" + }, + "engines": { + "node": ">=12.0.0" + } }, - "node_modules/cssstyle": { - "version": "2.3.0", + "node_modules/cypress/node_modules/@types/node": { + "version": "14.18.32", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.32.tgz", + "integrity": "sha512-Y6S38pFr04yb13qqHf8uk1nHE3lXgQ30WZbv1mLliV9pt0NjvqdWttLcrOYLnXbOafknVYRHZGoMSpR9UwfYow==", + "dev": true + }, + "node_modules/cypress/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { - "cssom": "~0.3.6" + "color-convert": "^2.0.1" }, "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/cssstyle/node_modules/cssom": { - "version": "0.3.8", + "node_modules/cypress/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT" + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } }, - "node_modules/csstype": { - "version": "3.0.11", - "license": "MIT" + "node_modules/cypress/node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cypress/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/cypress/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/cypress/node_modules/fs-extra": { + "version": "9.1.0", + 
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dev": true, + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/cypress/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cypress/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", + "dev": true, + "dependencies": { + "assert-plus": "^1.0.0" + }, + "engines": { + "node": ">=0.10" + } }, "node_modules/data-urls": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-2.0.0.tgz", + "integrity": "sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ==", "dev": true, - "license": "MIT", "dependencies": { "abab": "^2.0.3", "whatwg-mimetype": "^2.3.0", @@ -7599,9 +8671,16 @@ "node": ">=10" } }, + "node_modules/dayjs": { + "version": "1.11.5", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.5.tgz", + "integrity": "sha512-CAdX5Q3YW3Gclyo5Vpqkgpj8fSdLQcRuzfX6mC6Phy0nfJ0eGYOeS7m4mt2plDWLAtA4TqTakvbboHvUxfe4iA==", + "dev": true + }, "node_modules/debug": { "version": "4.3.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dependencies": { "ms": "2.1.2" }, @@ -7615,24 +8694,20 @@ } }, "node_modules/decimal.js": { - "version": "10.3.1", - "dev": true, - "license": "MIT" + "version": "10.4.2", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.2.tgz", + "integrity": "sha512-ic1yEvwT6GuvaYwBLLY6/aFFgjZdySKTE8en/fkU3QICTmRtgtSlFn0u0BXN06InZwtfCelR7j8LRiDI/02iGA==", + "dev": true }, "node_modules/decko": { - "version": "1.2.0" - }, - "node_modules/decode-uri-component": { - "version": "0.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10" - } + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decko/-/decko-1.2.0.tgz", + "integrity": "sha512-m8FnyHXV1QX+S1cl+KPFDIl6NMkxtKsy6+U/aYyjrOqWMuwAwYWu7ePqrsUHtDR5Y8Yk2pi/KIDSgF+vT4cPOQ==" }, "node_modules/decompress-response": { "version": "6.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", "dependencies": { "mimic-response": "^3.1.0" }, @@ -7645,31 +8720,36 @@ }, "node_modules/dedent": { "version": 
"0.7.0", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", + "integrity": "sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==", + "dev": true }, "node_modules/deep-extend": { "version": "0.6.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", "engines": { "node": ">=4.0.0" } }, "node_modules/deep-is": { "version": "0.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true }, "node_modules/deepmerge": { "version": "4.2.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", + "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==", "engines": { "node": ">=0.10.0" } }, "node_modules/default-gateway": { "version": "6.0.3", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz", + "integrity": "sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg==", "dependencies": { "execa": "^5.0.0" }, @@ -7677,20 +8757,64 @@ "node": ">= 10" } }, + "node_modules/default-gateway/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/default-gateway/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-gateway/node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "engines": { + "node": ">=10.17.0" + } + }, "node_modules/defer-to-connect": { "version": "1.1.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz", + "integrity": "sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==" }, "node_modules/define-lazy-prop": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", "engines": { "node": ">=8" } }, "node_modules/define-properties": { "version": "1.1.4", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", "dependencies": { "has-property-descriptors": "^1.0.0", "object-keys": "^1.1.1" @@ -7703,8 +8827,9 @@ } }, "node_modules/del": { - "version": "6.0.0", - "license": "MIT", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/del/-/del-6.1.1.tgz", + "integrity": "sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==", "dependencies": { "globby": "^11.0.1", "graceful-fs": "^4.2.4", @@ -7724,26 +8849,24 @@ }, "node_modules/delayed-stream": { "version": "1.0.0", - "dev": true, - "license": "MIT", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", "engines": { "node": ">=0.4.0" } }, - "node_modules/delegates": { - "version": "1.0.0", - "license": "MIT" - }, "node_modules/depd": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", "engines": { "node": ">= 0.8" } }, "node_modules/des.js": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.1.tgz", + "integrity": "sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==", "dependencies": { "inherits": "^2.0.1", "minimalistic-assert": "^1.0.0" @@ -7751,7 +8874,8 @@ }, "node_modules/destroy": { "version": "1.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", "engines": { "node": ">= 0.8", "npm": "1.2.8000 || >= 1.4.16" @@ -7759,7 +8883,8 @@ }, "node_modules/detab": { "version": "2.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/detab/-/detab-2.0.4.tgz", + "integrity": "sha512-8zdsQA5bIkoRECvCrNKPla84lyoR7DSAyf7p0YgXzBO9PDJx8KntPUay7NS6yp+KdxdVtiE5SpHKtbp2ZQyA9g==", "dependencies": { "repeat-string": "^1.5.4" }, @@ -7770,41 +8895,43 @@ }, "node_modules/detect-libc": { "version": "2.0.1", - "license": "Apache-2.0", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", + "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==", "engines": { "node": ">=8" } }, "node_modules/detect-newline": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/detect-node": { "version": "2.1.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", + "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==" }, "node_modules/detect-port": { - "version": "1.3.0", - "license": "MIT", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/detect-port/-/detect-port-1.5.1.tgz", + "integrity": "sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ==", "dependencies": { "address": "^1.0.1", - 
"debug": "^2.6.0" + "debug": "4" }, "bin": { - "detect": "bin/detect-port", - "detect-port": "bin/detect-port" - }, - "engines": { - "node": ">= 4.2.1" + "detect": "bin/detect-port.js", + "detect-port": "bin/detect-port.js" } }, "node_modules/detect-port-alt": { "version": "1.1.6", - "license": "MIT", + "resolved": "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz", + "integrity": "sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q==", "dependencies": { "address": "^1.0.1", "debug": "^2.6.0" @@ -7819,37 +8946,30 @@ }, "node_modules/detect-port-alt/node_modules/debug": { "version": "2.6.9", - "license": "MIT", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/detect-port-alt/node_modules/ms": { "version": "2.0.0", - "license": "MIT" - }, - "node_modules/detect-port/node_modules/debug": { - "version": "2.6.9", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/detect-port/node_modules/ms": { - "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/diff-sequences": { - "version": "27.5.1", + "version": "29.2.0", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.2.0.tgz", + "integrity": "sha512-413SY5JpYeSBZxmenGEmCVQ8mCgtFJF0w9PROdaS6z987XC2Pd2GOKqOITLtMftmyFZqgtCOb/QA7/Z3ZXfzIw==", "dev": true, - "license": "MIT", "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/diffie-hellman": { "version": "5.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", + "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", "dependencies": { "bn.js": "^4.1.0", "miller-rabin": "^4.0.0", @@ -7858,11 +8978,13 @@ }, "node_modules/diffie-hellman/node_modules/bn.js": { "version": "4.12.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, "node_modules/dir-glob": { "version": "3.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", "dependencies": { "path-type": "^4.0.0" }, @@ -7872,11 +8994,13 @@ }, "node_modules/dns-equal": { "version": "1.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", + "integrity": "sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg==" }, "node_modules/dns-packet": { - "version": "5.3.1", - "license": "MIT", + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.4.0.tgz", + "integrity": "sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g==", "dependencies": { "@leichtgewicht/ip-codec": "^2.0.1" }, @@ -7886,34 +9010,48 @@ }, "node_modules/dom-accessibility-api": { "version": "0.5.14", - "dev": true, - "license": "MIT" + "resolved": 
"https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.14.tgz", + "integrity": "sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg==", + "dev": true }, "node_modules/dom-converter": { "version": "0.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", + "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", "dependencies": { "utila": "~0.4" } }, "node_modules/dom-serializer": { - "version": "1.4.1", - "license": "MIT", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", "dependencies": { - "domelementtype": "^2.0.1", - "domhandler": "^4.2.0", - "entities": "^2.0.0" + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" }, "funding": { "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" } }, "node_modules/dom-walk": { - "version": "0.1.2" + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", + "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" + }, + "node_modules/dom7": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/dom7/-/dom7-4.0.4.tgz", + "integrity": "sha512-DSSgBzQ4rJWQp1u6o+3FVwMNnT5bzQbMb+o31TjYYeRi05uAcpF8koxdfzeoe5ElzPmua7W7N28YJhF7iEKqIw==", + "dependencies": { + "ssr-window": "^4.0.0" + } }, "node_modules/domain-browser": { "version": "4.22.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-4.22.0.tgz", + "integrity": "sha512-IGBwjF7tNk3cwypFNH/7bfzBcgSCbaMOD3GsaY1AU/JRrnHnYgEM0+9kQt52iZxjNsjBtJYtao146V+f8jFZNw==", "engines": { "node": ">=10" }, @@ -7923,18 +9061,20 @@ }, "node_modules/domelementtype": { "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/fb55" } - ], - "license": "BSD-2-Clause" + ] }, "node_modules/domexception": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/domexception/-/domexception-2.0.1.tgz", + "integrity": "sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg==", "dev": true, - "license": "MIT", "dependencies": { "webidl-conversions": "^5.0.0" }, @@ -7944,17 +9084,19 @@ }, "node_modules/domexception/node_modules/webidl-conversions": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz", + "integrity": "sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==", "dev": true, - "license": "BSD-2-Clause", "engines": { "node": ">=8" } }, "node_modules/domhandler": { - "version": "4.3.1", - "license": "BSD-2-Clause", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", "dependencies": { - "domelementtype": "^2.2.0" + "domelementtype": "^2.3.0" }, "engines": { "node": ">= 4" @@ -7964,16 +9106,18 @@ } }, "node_modules/dompurify": { - "version": "2.3.6", - "license": "(MPL-2.0 
OR Apache-2.0)" + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.4.0.tgz", + "integrity": "sha512-Be9tbQMZds4a3C6xTmz68NlMfeONA//4dOavl/1rNw50E+/QO0KVpbcU0PcaW0nsQxurXls9ZocqFxk8R2mWEA==" }, "node_modules/domutils": { - "version": "2.8.0", - "license": "BSD-2-Clause", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz", + "integrity": "sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==", "dependencies": { - "dom-serializer": "^1.0.1", - "domelementtype": "^2.2.0", - "domhandler": "^4.2.0" + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.1" }, "funding": { "url": "https://github.com/fb55/domutils?sponsor=1" @@ -7981,7 +9125,8 @@ }, "node_modules/dot-case": { "version": "3.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", + "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" @@ -7989,7 +9134,8 @@ }, "node_modules/dot-prop": { "version": "5.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", "dependencies": { "is-obj": "^2.0.0" }, @@ -7999,42 +9145,60 @@ }, "node_modules/dot-prop/node_modules/is-obj": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", "engines": { "node": ">=8" } }, "node_modules/dotenv": { "version": "10.0.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-10.0.0.tgz", + "integrity": "sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q==", "dev": true, - "license": "BSD-2-Clause", "engines": { "node": ">=10" } }, "node_modules/duplexer": { "version": "0.1.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", + "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==" }, "node_modules/duplexer3": { - "version": "0.1.4", - "license": "BSD-3-Clause" + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz", + "integrity": "sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==" }, "node_modules/eastasianwidth": { "version": "0.2.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + }, + "node_modules/ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", + "dev": true, + "dependencies": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } }, "node_modules/ee-first": { "version": "1.1.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "node_modules/electron-to-chromium": { - "version": "1.4.129", - "license": 
"ISC" + "version": "1.4.284", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz", + "integrity": "sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA==" }, "node_modules/elliptic": { "version": "6.5.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", + "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", "dependencies": { "bn.js": "^4.11.9", "brorand": "^1.1.0", @@ -8047,12 +9211,14 @@ }, "node_modules/elliptic/node_modules/bn.js": { "version": "4.12.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, "node_modules/emittery": { "version": "0.8.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.8.1.tgz", + "integrity": "sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg==", "dev": true, - "license": "MIT", "engines": { "node": ">=10" }, @@ -8062,18 +9228,21 @@ }, "node_modules/emoji-regex": { "version": "9.2.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" }, "node_modules/emojis-list": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", "engines": { "node": ">= 4" } }, "node_modules/emoticon": { "version": "3.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/emoticon/-/emoticon-3.2.0.tgz", + "integrity": "sha512-SNujglcLTTg+lDAcApPNgEdudaqQFiAbJCqzjNxJkvN9vAwCGi0uu8IUVvx+f16h+V44KCY6Y2yboroc9pilHg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -8081,21 +9250,24 @@ }, "node_modules/encodeurl": { "version": "1.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", "engines": { "node": ">= 0.8" } }, "node_modules/end-of-stream": { "version": "1.4.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "dependencies": { "once": "^1.4.0" } }, "node_modules/enhanced-resolve": { - "version": "5.9.3", - "license": "MIT", + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz", + "integrity": "sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==", "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" @@ -8104,44 +9276,66 @@ "node": ">=10.13.0" } }, + "node_modules/enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "dependencies": { + "ansi-colors": "^4.1.1" + }, + "engines": { + "node": ">=8.6" + } + }, "node_modules/entities": { - "version": "2.2.0", - "license": "BSD-2-Clause", + 
"version": "4.4.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.4.0.tgz", + "integrity": "sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==", + "engines": { + "node": ">=0.12" + }, "funding": { "url": "https://github.com/fb55/entities?sponsor=1" } }, "node_modules/error-ex": { "version": "1.3.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", "dependencies": { "is-arrayish": "^0.2.1" } }, "node_modules/es-abstract": { - "version": "1.19.5", - "license": "MIT", + "version": "1.20.4", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.4.tgz", + "integrity": "sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA==", "dependencies": { "call-bind": "^1.0.2", "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", - "get-intrinsic": "^1.1.1", + "function.prototype.name": "^1.1.5", + "get-intrinsic": "^1.1.3", "get-symbol-description": "^1.0.0", "has": "^1.0.3", + "has-property-descriptors": "^1.0.0", "has-symbols": "^1.0.3", "internal-slot": "^1.0.3", - "is-callable": "^1.2.4", + "is-callable": "^1.2.7", "is-negative-zero": "^2.0.2", "is-regex": "^1.1.4", "is-shared-array-buffer": "^1.0.2", "is-string": "^1.0.7", "is-weakref": "^1.0.2", - "object-inspect": "^1.12.0", + "object-inspect": "^1.12.2", "object-keys": "^1.1.1", - "object.assign": "^4.1.2", - "string.prototype.trimend": "^1.0.4", - "string.prototype.trimstart": "^1.0.4", - "unbox-primitive": "^1.0.1" + "object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.4.3", + "safe-regex-test": "^1.0.0", + "string.prototype.trimend": "^1.0.5", + "string.prototype.trimstart": "^1.0.5", + "unbox-primitive": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -8152,11 +9346,13 @@ }, "node_modules/es-module-lexer": { "version": "0.9.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz", + "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==" }, "node_modules/es-to-primitive": { "version": "1.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", "dependencies": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", @@ -8171,41 +9367,48 @@ }, "node_modules/es6-object-assign": { "version": "1.1.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/es6-object-assign/-/es6-object-assign-1.1.0.tgz", + "integrity": "sha512-MEl9uirslVwqQU369iHNWZXsI8yaZYGg/D65aOgZkeyFJwHYSxilf7rQzXKI7DdDuBPrBXbfk3sl9hJhmd5AUw==" }, "node_modules/es6-promise": { "version": "3.3.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", + "integrity": "sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==" }, "node_modules/escalade": { "version": "3.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", "engines": { "node": ">=6" } }, "node_modules/escape-goat": { "version": "2.1.1", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz", + "integrity": "sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==", "engines": { "node": ">=8" } }, "node_modules/escape-html": { "version": "1.0.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" }, "node_modules/escape-string-regexp": { "version": "1.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "engines": { "node": ">=0.8.0" } }, "node_modules/escodegen": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.0.0.tgz", + "integrity": "sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { "esprima": "^4.0.1", "estraverse": "^5.2.0", @@ -8225,7 +9428,8 @@ }, "node_modules/eslint-scope": { "version": "5.1.1", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" @@ -8236,14 +9440,16 @@ }, "node_modules/eslint-scope/node_modules/estraverse": { "version": "4.3.0", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", "engines": { "node": ">=4.0" } }, "node_modules/esprima": { "version": "4.0.1", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", "bin": { "esparse": "bin/esparse.js", "esvalidate": "bin/esvalidate.js" @@ -8254,7 +9460,8 @@ }, "node_modules/esrecurse": { "version": "4.3.0", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dependencies": { "estraverse": "^5.2.0" }, @@ -8264,21 +9471,24 @@ }, "node_modules/estraverse": { "version": "5.3.0", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "engines": { "node": ">=4.0" } }, "node_modules/esutils": { "version": "2.0.3", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "engines": { "node": ">=0.10.0" } }, "node_modules/eta": { "version": "1.12.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/eta/-/eta-1.12.3.tgz", + "integrity": "sha512-qHixwbDLtekO/d51Yr4glcaUJCIjGVJyTzuqV4GPlgZo1YpgOKG+avQynErZIYrfM6JIJdtiG2Kox8tbb+DoGg==", "engines": { "node": ">=6.0.0" }, @@ -8288,13 +9498,16 @@ }, "node_modules/etag": { "version": "1.8.1", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", "engines": { "node": ">= 0.6" } }, "node_modules/eval": { "version": "0.1.8", + "resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", + "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", "dependencies": { "@types/node": "*", "require-like": ">= 0.1.1" @@ -8303,37 +9516,48 @@ "node": ">= 0.8" } }, + "node_modules/eventemitter2": { + "version": "6.4.7", + "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.7.tgz", + "integrity": "sha512-tYUSVOGeQPKt/eC1ABfhHy5Xd96N3oIijJvN3O9+TsC28T5V9yX9oEfEK5faP0EFSNVOG97qtAS68GBrQB2hDg==", + "dev": true + }, "node_modules/eventemitter3": { "version": "4.0.7", - "license": "MIT" + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" }, "node_modules/events": { "version": "3.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", "engines": { "node": ">=0.8.x" } }, "node_modules/evp_bytestokey": { "version": "1.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", + "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", "dependencies": { "md5.js": "^1.3.4", "safe-buffer": "^5.1.1" } }, "node_modules/execa": { - "version": "5.1.1", - "license": "MIT", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", + "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", + "dev": true, "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", + "cross-spawn": "^7.0.0", + "get-stream": "^5.0.0", + "human-signals": "^1.1.1", "is-stream": "^2.0.0", "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", + "npm-run-path": "^4.0.0", + "onetime": "^5.1.0", + "signal-exit": "^3.0.2", "strip-final-newline": "^2.0.0" }, "engines": { @@ -8343,11 +9567,27 @@ "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, + "node_modules/executable": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", + "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", + "dev": true, + "dependencies": { + "pify": "^2.2.0" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/exif-parser": { - "version": "0.1.12" + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/exif-parser/-/exif-parser-0.1.12.tgz", + "integrity": "sha512-c2bQfLNbMzLPmzQuOr8fy0csy84WmwnER81W88DzTp9CYNPJ6yzOj2EZAh9pywYpqHnshVLHQJ8WzldAyfY+Iw==" }, "node_modules/exit": { "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", "dev": true, "engines": { "node": ">= 0.8.0" @@ -8355,32 +9595,36 @@ }, "node_modules/expand-template": { "version": "2.0.3", - "license": "(MIT OR WTFPL)", + "resolved": 
"https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", "engines": { "node": ">=6" } }, "node_modules/expect": { - "version": "27.5.1", + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.2.1.tgz", + "integrity": "sha512-BJtA754Fba0YWRWHgjKUMTA3ltWarKgITXHQnbZ2mTxTXC4yMQlR0FI7HkB3fJYkhWBf4qjNiqvg3LDtXCcVRQ==", "dev": true, - "license": "MIT", "dependencies": { - "@jest/types": "^27.5.1", - "jest-get-type": "^27.5.1", - "jest-matcher-utils": "^27.5.1", - "jest-message-util": "^27.5.1" + "@jest/expect-utils": "^29.2.1", + "jest-get-type": "^29.2.0", + "jest-matcher-utils": "^29.2.1", + "jest-message-util": "^29.2.1", + "jest-util": "^29.2.1" }, "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/express": { - "version": "4.18.1", - "license": "MIT", + "version": "4.18.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", + "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.0", + "body-parser": "1.20.1", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.5.0", @@ -8399,7 +9643,7 @@ "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", "proxy-addr": "~2.0.7", - "qs": "6.10.3", + "qs": "6.11.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.18.0", @@ -8416,11 +9660,13 @@ }, "node_modules/express/node_modules/array-flatten": { "version": "1.1.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, "node_modules/express/node_modules/content-disposition": { "version": "0.5.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", "dependencies": { "safe-buffer": "5.2.1" }, @@ -8430,51 +9676,53 @@ }, "node_modules/express/node_modules/debug": { "version": "2.6.9", - "license": "MIT", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/express/node_modules/ms": { "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/express/node_modules/path-to-regexp": { "version": "0.1.7", - "license": "MIT" + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, - "node_modules/express/node_modules/range-parser": { - "version": "1.2.1", - "license": "MIT", + "node_modules/express/node_modules/qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dependencies": { + "side-channel": "^1.0.4" + 
}, "engines": { - "node": ">= 0.6" - } - }, - "node_modules/express/node_modules/safe-buffer": { - "version": "5.2.1", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/express/node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } }, "node_modules/extend": { "version": "3.0.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, "node_modules/extend-shallow": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", "dependencies": { "is-extendable": "^0.1.0" }, @@ -8482,13 +9730,44 @@ "node": ">=0.10.0" } }, + "node_modules/extract-zip": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", + "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "get-stream": "^5.1.0", + "yauzl": "^2.10.0" + }, + "bin": { + "extract-zip": "cli.js" + }, + "engines": { + "node": ">= 10.17.0" + }, + "optionalDependencies": { + "@types/yauzl": "^2.9.1" + } + }, + "node_modules/extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==", + "dev": true, + "engines": [ + "node >=0.6.0" + ] + }, "node_modules/fast-deep-equal": { "version": "3.1.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" }, "node_modules/fast-glob": { - "version": "3.2.11", - "license": "MIT", + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", @@ -8502,38 +9781,45 @@ }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, "node_modules/fast-levenshtein": { "version": "2.0.6", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true }, "node_modules/fast-safe-stringify": { "version": "2.1.1", - 
"license": "MIT" + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" }, "node_modules/fast-url-parser": { "version": "1.1.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", + "integrity": "sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==", "dependencies": { "punycode": "^1.3.2" } }, "node_modules/fast-url-parser/node_modules/punycode": { "version": "1.4.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==" }, "node_modules/fastq": { "version": "1.13.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", "dependencies": { "reusify": "^1.0.4" } }, "node_modules/faye-websocket": { "version": "0.11.4", - "license": "Apache-2.0", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", + "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", "dependencies": { "websocket-driver": ">=0.5.1" }, @@ -8542,23 +9828,26 @@ } }, "node_modules/fb-watchman": { - "version": "2.0.1", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", "dev": true, - "license": "Apache-2.0", "dependencies": { "bser": "2.1.1" } }, "node_modules/fbemitter": { "version": "3.0.0", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/fbemitter/-/fbemitter-3.0.0.tgz", + "integrity": "sha512-KWKaceCwKQU0+HPoop6gn4eOHk50bBv/VxjJtGMfwmJt3D29JpN4H4eisCtIPA+a8GVBam+ldMMpMjJUvpDyHw==", "dependencies": { "fbjs": "^3.0.0" } }, "node_modules/fbjs": { "version": "3.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-3.0.4.tgz", + "integrity": "sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ==", "dependencies": { "cross-fetch": "^3.1.5", "fbjs-css-vars": "^1.0.0", @@ -8571,11 +9860,22 @@ }, "node_modules/fbjs-css-vars": { "version": "1.0.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz", + "integrity": "sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==" + }, + "node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dev": true, + "dependencies": { + "pend": "~1.2.0" + } }, "node_modules/feed": { "version": "4.2.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/feed/-/feed-4.2.2.tgz", + "integrity": "sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ==", "dependencies": { "xml-js": "^1.6.11" }, @@ -8583,9 +9883,25 @@ "node": ">=0.4.0" } }, + "node_modules/figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": 
"sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/file-loader": { "version": "6.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz", + "integrity": "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==", "dependencies": { "loader-utils": "^2.0.0", "schema-utils": "^3.0.0" @@ -8603,7 +9919,8 @@ }, "node_modules/file-loader/node_modules/schema-utils": { "version": "3.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", "dependencies": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", @@ -8619,21 +9936,24 @@ }, "node_modules/file-type": { "version": "9.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-9.0.0.tgz", + "integrity": "sha512-Qe/5NJrgIOlwijpq3B7BEpzPFcgzggOTagZmkXQY4LA6bsXKTUstK7Wp12lEJ/mLKTpvIZxmIuRcLYWT6ov9lw==", "engines": { "node": ">=6" } }, "node_modules/filesize": { "version": "8.0.7", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/filesize/-/filesize-8.0.7.tgz", + "integrity": "sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ==", "engines": { "node": ">= 0.4.0" } }, "node_modules/fill-range": { "version": "7.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", "dependencies": { "to-regex-range": "^5.0.1" }, @@ -8643,14 +9963,16 @@ }, "node_modules/filter-obj": { "version": "2.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-2.0.2.tgz", + "integrity": "sha512-lO3ttPjHZRfjMcxWKb1j1eDhTFsu4meeR3lnMcnBFhk6RuLhvEiuALu2TlfL310ph4lCYYwgF/ElIjdP739tdg==", "engines": { "node": ">=8" } }, "node_modules/finalhandler": { "version": "1.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", + "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", "dependencies": { "debug": "2.6.9", "encodeurl": "~1.0.2", @@ -8666,18 +9988,21 @@ }, "node_modules/finalhandler/node_modules/debug": { "version": "2.6.9", - "license": "MIT", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/finalhandler/node_modules/ms": { "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/find-cache-dir": { "version": "3.3.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", + "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", "dependencies": { "commondir": "^1.0.1", "make-dir": "^3.0.2", @@ -8692,7 +10017,8 @@ }, 
"node_modules/find-up": { "version": "4.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" @@ -8703,7 +10029,8 @@ }, "node_modules/flux": { "version": "4.0.3", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/flux/-/flux-4.0.3.tgz", + "integrity": "sha512-yKAbrp7JhZhj6uiT1FTuVMlIAT1J4jqEyBpFApi1kxpGZCvacMVc/t1pMQyotqHhAgvoE3bNvAykhCo2CLjnYw==", "dependencies": { "fbemitter": "^3.0.0", "fbjs": "^3.0.1" @@ -8713,14 +10040,15 @@ } }, "node_modules/follow-redirects": { - "version": "1.14.9", + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", + "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==", "funding": [ { "type": "individual", "url": "https://github.com/sponsors/RubenVerborgh" } ], - "license": "MIT", "engines": { "node": ">=4.0" }, @@ -8730,13 +10058,32 @@ } } }, + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "dependencies": { + "is-callable": "^1.1.3" + } + }, "node_modules/foreach": { - "version": "2.0.5", - "license": "MIT" + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.6.tgz", + "integrity": "sha512-k6GAGDyqLe9JaebCsFCoudPPWfihKu8pylYXRlqP1J7ms39iPoTtk2fviNglIeQEwdh0bQeKJ01ZPyuyQvKzwg==" + }, + "node_modules/forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==", + "dev": true, + "engines": { + "node": "*" + } }, "node_modules/fork-ts-checker-webpack-plugin": { "version": "6.5.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz", + "integrity": "sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA==", "dependencies": { "@babel/code-frame": "^7.8.3", "@types/json-schema": "^7.0.5", @@ -8773,7 +10120,8 @@ }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/ansi-styles": { "version": "4.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dependencies": { "color-convert": "^2.0.1" }, @@ -8786,7 +10134,8 @@ }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/chalk": { "version": "4.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -8800,7 +10149,8 @@ }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/color-convert": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { 
"color-name": "~1.1.4" }, @@ -8810,11 +10160,13 @@ }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/color-name": { "version": "1.1.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/cosmiconfig": { "version": "6.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz", + "integrity": "sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==", "dependencies": { "@types/parse-json": "^4.0.0", "import-fresh": "^3.1.0", @@ -8828,7 +10180,8 @@ }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/fs-extra": { "version": "9.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", "dependencies": { "at-least-node": "^1.0.0", "graceful-fs": "^4.2.0", @@ -8841,14 +10194,16 @@ }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/has-flag": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "engines": { "node": ">=8" } }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/schema-utils": { "version": "2.7.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz", + "integrity": "sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A==", "dependencies": { "@types/json-schema": "^7.0.4", "ajv": "^6.12.2", @@ -8864,7 +10219,8 @@ }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/supports-color": { "version": "7.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dependencies": { "has-flag": "^4.0.0" }, @@ -8874,15 +10230,16 @@ }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/tapable": { "version": "1.1.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz", + "integrity": "sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==", "engines": { "node": ">=6" } }, "node_modules/form-data": { - "version": "3.0.1", - "dev": true, - "license": "MIT", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -8894,14 +10251,16 @@ }, "node_modules/forwarded": { "version": "0.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", "engines": { "node": ">= 0.6" } }, "node_modules/fraction.js": { "version": "4.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", + "integrity": 
"sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==", "engines": { "node": "*" }, @@ -8912,22 +10271,26 @@ }, "node_modules/fresh": { "version": "0.5.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", "engines": { "node": ">= 0.6" } }, "node_modules/fs": { "version": "0.0.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.2.tgz", + "integrity": "sha512-YAiVokMCrSIFZiroB1oz51hPiPRVcUtSa4x2U5RYXyhS9VAPdiFigKbPTnOSq7XY8wd3FIVPYmXpo5lMzFmxgg==" }, "node_modules/fs-constants": { "version": "1.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" }, "node_modules/fs-extra": { "version": "10.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", @@ -8939,15 +10302,19 @@ }, "node_modules/fs-monkey": { "version": "1.0.3", - "license": "Unlicense" + "resolved": "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.3.tgz", + "integrity": "sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q==" }, "node_modules/fs.realpath": { "version": "1.0.0", - "license": "ISC" + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "node_modules/fsevents": { "version": "2.3.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "hasInstallScript": true, "optional": true, "os": [ "darwin" @@ -8958,82 +10325,58 @@ }, "node_modules/function-bind": { "version": "1.1.1", - "license": "MIT" - }, - "node_modules/gauge": { - "version": "2.7.4", - "license": "ISC", - "dependencies": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - } - }, - "node_modules/gauge/node_modules/ansi-regex": { - "version": "2.1.1", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" }, - "node_modules/gauge/node_modules/is-fullwidth-code-point": { - "version": "1.0.0", - "license": "MIT", + "node_modules/function.prototype.name": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", + "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", "dependencies": { - "number-is-nan": "^1.0.0" + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.0", + "functions-have-names": "^1.2.2" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gauge/node_modules/string-width": { - 
"version": "1.0.2", - "license": "MIT", - "dependencies": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" + "node": ">= 0.4" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gauge/node_modules/strip-ansi": { - "version": "3.0.1", - "license": "MIT", - "dependencies": { - "ansi-regex": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/gensync": { "version": "1.0.0-beta.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", "engines": { "node": ">=6.9.0" } }, "node_modules/get-caller-file": { "version": "2.0.5", - "license": "ISC", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "engines": { "node": "6.* || 8.* || >= 10.*" } }, "node_modules/get-intrinsic": { - "version": "1.1.1", - "license": "MIT", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", "dependencies": { "function-bind": "^1.1.1", "has": "^1.0.3", - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.3" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -9041,21 +10384,27 @@ }, "node_modules/get-own-enumerable-property-symbols": { "version": "3.0.2", - "license": "ISC" + "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", + "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==" }, "node_modules/get-package-type": { "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", "dev": true, - "license": "MIT", "engines": { "node": ">=8.0.0" } }, "node_modules/get-stream": { - "version": "6.0.1", - "license": "MIT", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dependencies": { + "pump": "^3.0.0" + }, "engines": { - "node": ">=10" + "node": ">=8" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -9063,7 +10412,8 @@ }, "node_modules/get-symbol-description": { "version": "1.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", "dependencies": { "call-bind": "^1.0.2", "get-intrinsic": "^1.1.1" @@ -9075,9 +10425,28 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/getos": { + "version": "3.2.1", + "resolved": 
"https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", + "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", + "dev": true, + "dependencies": { + "async": "^3.2.0" + } + }, + "node_modules/getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", + "dev": true, + "dependencies": { + "assert-plus": "^1.0.0" + } + }, "node_modules/gifwrap": { "version": "0.9.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/gifwrap/-/gifwrap-0.9.4.tgz", + "integrity": "sha512-MDMwbhASQuVeD4JKd1fKgNgCRL3fGqMM4WaqpNhWO0JiMOAjbQdumbs4BbBZEy9/M00EHEjKN3HieVhCUlwjeQ==", "dependencies": { "image-q": "^4.0.0", "omggif": "^1.0.10" @@ -9085,20 +10454,23 @@ }, "node_modules/github-from-package": { "version": "0.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==" }, "node_modules/github-slugger": { "version": "1.4.0", - "license": "ISC" + "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.4.0.tgz", + "integrity": "sha512-w0dzqw/nt51xMVmlaV1+JRzN+oCa1KfcgGEWhxUG16wbdA+Xnt/yoFO8Z8x/V82ZcZ0wy6ln9QDup5avbhiDhQ==" }, "node_modules/glob": { - "version": "7.2.0", - "license": "ISC", + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "^3.0.4", + "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" }, @@ -9111,7 +10483,8 @@ }, "node_modules/glob-parent": { "version": "5.1.2", - "license": "ISC", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dependencies": { "is-glob": "^4.0.1" }, @@ -9121,11 +10494,13 @@ }, "node_modules/glob-to-regexp": { "version": "0.4.1", - "license": "BSD-2-Clause" + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" }, "node_modules/global": { "version": "4.4.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz", + "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==", "dependencies": { "min-document": "^2.19.0", "process": "^0.11.10" @@ -9133,7 +10508,8 @@ }, "node_modules/global-dirs": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", + "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", "dependencies": { "ini": "2.0.0" }, @@ -9144,16 +10520,10 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/global-dirs/node_modules/ini": { - "version": "2.0.0", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, "node_modules/global-modules": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz", + 
"integrity": "sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==", "dependencies": { "global-prefix": "^3.0.0" }, @@ -9163,7 +10533,8 @@ }, "node_modules/global-prefix": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz", + "integrity": "sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==", "dependencies": { "ini": "^1.3.5", "kind-of": "^6.0.2", @@ -9173,9 +10544,15 @@ "node": ">=6" } }, + "node_modules/global-prefix/node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" + }, "node_modules/global-prefix/node_modules/which": { "version": "1.3.1", - "license": "ISC", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", "dependencies": { "isexe": "^2.0.0" }, @@ -9185,14 +10562,16 @@ }, "node_modules/globals": { "version": "11.12.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", "engines": { "node": ">=4" } }, "node_modules/globby": { "version": "11.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", "dependencies": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", @@ -9210,7 +10589,8 @@ }, "node_modules/got": { "version": "9.6.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", + "integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==", "dependencies": { "@sindresorhus/is": "^0.14.0", "@szmarczak/http-timer": "^1.1.2", @@ -9230,7 +10610,8 @@ }, "node_modules/got/node_modules/decompress-response": { "version": "3.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==", "dependencies": { "mimic-response": "^1.0.0" }, @@ -9240,7 +10621,8 @@ }, "node_modules/got/node_modules/get-stream": { "version": "4.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", "dependencies": { "pump": "^3.0.0" }, @@ -9250,18 +10632,21 @@ }, "node_modules/got/node_modules/mimic-response": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", "engines": { "node": ">=4" } }, "node_modules/graceful-fs": { "version": "4.2.10", - "license": "ISC" + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" }, "node_modules/gray-matter": { "version": "4.0.3", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", + "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", "dependencies": { "js-yaml": "^3.13.1", "kind-of": "^6.0.2", @@ -9274,14 +10659,16 @@ }, "node_modules/gray-matter/node_modules/argparse": { "version": "1.0.10", - "license": "MIT", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "dependencies": { "sprintf-js": "~1.0.2" } }, "node_modules/gray-matter/node_modules/js-yaml": { "version": "3.14.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -9292,7 +10679,8 @@ }, "node_modules/gzip-size": { "version": "6.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz", + "integrity": "sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==", "dependencies": { "duplexer": "^0.1.2" }, @@ -9305,11 +10693,13 @@ }, "node_modules/handle-thing": { "version": "2.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", + "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==" }, "node_modules/has": { "version": "1.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", "dependencies": { "function-bind": "^1.1.1" }, @@ -9319,21 +10709,24 @@ }, "node_modules/has-bigints": { "version": "1.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/has-flag": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", "engines": { "node": ">=4" } }, "node_modules/has-property-descriptors": { "version": "1.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", + "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", "dependencies": { "get-intrinsic": "^1.1.1" }, @@ -9343,7 +10736,8 @@ }, "node_modules/has-symbols": { "version": "1.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", "engines": { "node": ">= 0.4" }, @@ -9353,7 +10747,8 @@ }, "node_modules/has-tostringtag": { "version": "1.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", "dependencies": { "has-symbols": "^1.0.2" }, @@ -9364,20 +10759,18 @@ "url": 
"https://github.com/sponsors/ljharb" } }, - "node_modules/has-unicode": { - "version": "2.0.1", - "license": "ISC" - }, "node_modules/has-yarn": { "version": "2.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz", + "integrity": "sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==", "engines": { "node": ">=8" } }, "node_modules/hash-base": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", + "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", "dependencies": { "inherits": "^2.0.4", "readable-stream": "^3.6.0", @@ -9387,27 +10780,10 @@ "node": ">=4" } }, - "node_modules/hash-base/node_modules/safe-buffer": { - "version": "5.2.1", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, "node_modules/hash.js": { "version": "1.1.7", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", + "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", "dependencies": { "inherits": "^2.0.3", "minimalistic-assert": "^1.0.1" @@ -9415,7 +10791,8 @@ }, "node_modules/hast-to-hyperscript": { "version": "9.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz", + "integrity": "sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA==", "dependencies": { "@types/unist": "^2.0.3", "comma-separated-tokens": "^1.0.0", @@ -9432,7 +10809,8 @@ }, "node_modules/hast-util-from-parse5": { "version": "6.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz", + "integrity": "sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA==", "dependencies": { "@types/parse5": "^5.0.0", "hastscript": "^6.0.0", @@ -9446,9 +10824,19 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/hast-util-is-element": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-1.1.0.tgz", + "integrity": "sha512-oUmNua0bFbdrD/ELDSSEadRVtWZOf3iF6Lbv81naqsIV99RnSCieTbWuWCY8BAeEfKJTKl0gRdokv+dELutHGQ==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/hast-util-parse-selector": { "version": "2.2.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz", + "integrity": "sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -9456,7 +10844,8 @@ }, "node_modules/hast-util-raw": { "version": "6.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-6.0.1.tgz", + "integrity": "sha512-ZMuiYA+UF7BXBtsTBNcLBF5HzXzkyE6MLzJnL605LKE8GJylNjGc4jjxazAHUtcwT5/CEt6afRKViYB4X66dig==", "dependencies": { "@types/hast": "^2.0.0", "hast-util-from-parse5": "^6.0.0", @@ -9474,9 +10863,15 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/hast-util-raw/node_modules/parse5": 
{ + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" + }, "node_modules/hast-util-to-parse5": { "version": "6.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz", + "integrity": "sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ==", "dependencies": { "hast-to-hyperscript": "^9.0.0", "property-information": "^5.0.0", @@ -9489,9 +10884,24 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/hast-util-to-text": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/hast-util-to-text/-/hast-util-to-text-2.0.1.tgz", + "integrity": "sha512-8nsgCARfs6VkwH2jJU9b8LNTuR4700na+0h3PqCaEk4MAnMDeu5P0tP8mjk9LLNGxIeQRLbiDbZVw6rku+pYsQ==", + "dependencies": { + "hast-util-is-element": "^1.0.0", + "repeat-string": "^1.0.0", + "unist-util-find-after": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/hastscript": { "version": "6.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz", + "integrity": "sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==", "dependencies": { "@types/hast": "^2.0.0", "comma-separated-tokens": "^1.0.0", @@ -9506,14 +10916,16 @@ }, "node_modules/he": { "version": "1.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", "bin": { "he": "bin/he" } }, "node_modules/history": { "version": "4.10.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/history/-/history-4.10.1.tgz", + "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", "dependencies": { "@babel/runtime": "^7.1.2", "loose-envify": "^1.2.0", @@ -9525,7 +10937,8 @@ }, "node_modules/hmac-drbg": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", + "integrity": "sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==", "dependencies": { "hash.js": "^1.0.3", "minimalistic-assert": "^1.0.0", @@ -9534,18 +10947,21 @@ }, "node_modules/hoist-non-react-statics": { "version": "3.3.2", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", + "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", "dependencies": { "react-is": "^16.7.0" } }, "node_modules/hoist-non-react-statics/node_modules/react-is": { "version": "16.13.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/hpack.js": { "version": "2.1.6", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz", + "integrity": "sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==", "dependencies": { "inherits": "^2.0.1", "obuf": "^1.0.0", @@ -9555,11 +10971,13 @@ }, "node_modules/hpack.js/node_modules/isarray": { "version": "1.0.0", - 
"license": "MIT" + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" }, "node_modules/hpack.js/node_modules/readable-stream": { "version": "2.3.7", - "license": "MIT", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -9570,17 +10988,24 @@ "util-deprecate": "~1.0.1" } }, + "node_modules/hpack.js/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, "node_modules/hpack.js/node_modules/string_decoder": { "version": "1.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "dependencies": { "safe-buffer": "~5.1.0" } }, "node_modules/html-encoding-sniffer": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", + "integrity": "sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ==", "dev": true, - "license": "MIT", "dependencies": { "whatwg-encoding": "^1.0.5" }, @@ -9590,16 +11015,19 @@ }, "node_modules/html-entities": { "version": "2.3.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.3.3.tgz", + "integrity": "sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA==" }, "node_modules/html-escaper": { "version": "2.0.2", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true }, "node_modules/html-minifier-terser": { "version": "6.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", + "integrity": "sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==", "dependencies": { "camel-case": "^4.1.2", "clean-css": "^5.2.2", @@ -9618,14 +11046,16 @@ }, "node_modules/html-minifier-terser/node_modules/commander": { "version": "8.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", "engines": { "node": ">= 12" } }, "node_modules/html-tags": { "version": "3.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.2.0.tgz", + "integrity": "sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg==", "engines": { "node": ">=8" }, @@ -9635,7 +11065,8 @@ }, "node_modules/html-void-elements": { "version": "1.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.5.tgz", + "integrity": "sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==", "funding": { "type": "github", "url": 
"https://github.com/sponsors/wooorm" @@ -9643,7 +11074,8 @@ }, "node_modules/html-webpack-plugin": { "version": "5.5.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz", + "integrity": "sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw==", "dependencies": { "@types/html-minifier-terser": "^6.0.0", "html-minifier-terser": "^6.0.2", @@ -9663,7 +11095,9 @@ } }, "node_modules/htmlparser2": { - "version": "6.1.0", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.1.tgz", + "integrity": "sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA==", "funding": [ "https://github.com/fb55/htmlparser2?sponsor=1", { @@ -9671,25 +11105,27 @@ "url": "https://github.com/sponsors/fb55" } ], - "license": "MIT", "dependencies": { - "domelementtype": "^2.0.1", - "domhandler": "^4.0.0", - "domutils": "^2.5.2", - "entities": "^2.0.0" + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "entities": "^4.3.0" } }, "node_modules/http-cache-semantics": { "version": "4.1.0", - "license": "BSD-2-Clause" + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz", + "integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==" }, "node_modules/http-deceiver": { "version": "1.2.7", - "license": "MIT" + "resolved": "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz", + "integrity": "sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==" }, "node_modules/http-errors": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", @@ -9702,12 +11138,14 @@ } }, "node_modules/http-parser-js": { - "version": "0.5.6", - "license": "MIT" + "version": "0.5.8", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz", + "integrity": "sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==" }, "node_modules/http-proxy": { "version": "1.18.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", + "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", "dependencies": { "eventemitter3": "^4.0.0", "follow-redirects": "^1.0.0", @@ -9719,8 +11157,9 @@ }, "node_modules/http-proxy-agent": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", "dev": true, - "license": "MIT", "dependencies": { "@tootallnate/once": "1", "agent-base": "6", @@ -9732,7 +11171,8 @@ }, "node_modules/http-proxy-middleware": { "version": "2.0.6", - "license": "MIT", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz", + "integrity": "sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==", "dependencies": { "@types/http-proxy": "^1.17.8", "http-proxy": "^1.18.1", @@ -9754,7 +11194,8 @@ }, "node_modules/http-proxy-middleware/node_modules/is-plain-obj": { 
"version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", + "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==", "engines": { "node": ">=10" }, @@ -9762,18 +11203,35 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/http-signature": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", + "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", + "dev": true, + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^2.0.2", + "sshpk": "^1.14.1" + }, + "engines": { + "node": ">=0.10" + } + }, "node_modules/http2-client": { "version": "1.3.5", - "license": "MIT" + "resolved": "https://registry.npmjs.org/http2-client/-/http2-client-1.3.5.tgz", + "integrity": "sha512-EC2utToWl4RKfs5zd36Mxq7nzHHBuomZboI0yYL6Y0RmBgT7Sgkq4rQ0ezFTYoIsSs7Tm9SJe+o2FcAg6GBhGA==" }, "node_modules/https-browserify": { "version": "1.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-1.0.0.tgz", + "integrity": "sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg==" }, "node_modules/https-proxy-agent": { "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", "dev": true, - "license": "MIT", "dependencies": { "agent-base": "6", "debug": "4" @@ -9783,15 +11241,18 @@ } }, "node_modules/human-signals": { - "version": "2.1.0", - "license": "Apache-2.0", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", + "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", + "dev": true, "engines": { - "node": ">=10.17.0" + "node": ">=8.12.0" } }, "node_modules/iconv-lite": { "version": "0.4.24", - "license": "MIT", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "dependencies": { "safer-buffer": ">= 2.1.2 < 3" }, @@ -9801,8 +11262,9 @@ }, "node_modules/icss-utils": { "version": "4.1.1", + "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-4.1.1.tgz", + "integrity": "sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA==", "dev": true, - "license": "ISC", "dependencies": { "postcss": "^7.0.14" }, @@ -9812,13 +11274,15 @@ }, "node_modules/icss-utils/node_modules/picocolors": { "version": "0.2.1", - "dev": true, - "license": "ISC" + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", + "dev": true }, "node_modules/icss-utils/node_modules/postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, - "license": "MIT", "dependencies": { "picocolors": "^0.2.1", "source-map": "^0.6.1" @@ -9833,6 +11297,8 @@ }, "node_modules/ieee754": { "version": "1.2.1", + "resolved": 
"https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", "funding": [ { "type": "github", @@ -9846,30 +11312,33 @@ "type": "consulting", "url": "https://feross.org/support" } - ], - "license": "BSD-3-Clause" + ] }, "node_modules/ignore": { "version": "5.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", "engines": { "node": ">= 4" } }, "node_modules/image-q": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/image-q/-/image-q-4.0.0.tgz", + "integrity": "sha512-PfJGVgIfKQJuq3s0tTDOKtztksibuUEbJQIYT3by6wctQo+Rdlh7ef4evJ5NCdxY4CfMbvFkocEwbl4BF8RlJw==", "dependencies": { "@types/node": "16.9.1" } }, "node_modules/image-q/node_modules/@types/node": { "version": "16.9.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz", + "integrity": "sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g==" }, "node_modules/image-size": { - "version": "1.0.1", - "license": "MIT", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/image-size/-/image-size-1.0.2.tgz", + "integrity": "sha512-xfOoWjceHntRb3qFCrh5ZFORYH8XCdYpASltMhZ/Q0KZiOwjdE/Yl2QCiWdwD+lygV5bMCvauzgu5PxBX/Yerg==", "dependencies": { "queue": "6.0.2" }, @@ -9877,12 +11346,13 @@ "image-size": "bin/image-size.js" }, "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" } }, "node_modules/immer": { - "version": "9.0.12", - "license": "MIT", + "version": "9.0.15", + "resolved": "https://registry.npmjs.org/immer/-/immer-9.0.15.tgz", + "integrity": "sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ==", "funding": { "type": "opencollective", "url": "https://opencollective.com/immer" @@ -9890,7 +11360,8 @@ }, "node_modules/import-fresh": { "version": "3.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" @@ -9904,15 +11375,17 @@ }, "node_modules/import-lazy": { "version": "2.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", + "integrity": "sha512-m7ZEHgtw69qOGw+jwxXkHlrlIPdTGkyh66zXZ1ajZbxkDBNjSY/LGbmjc7h0s2ELsUDTAhFr55TrPSSqJGPG0A==", "engines": { "node": ">=4" } }, "node_modules/import-local": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", "dev": true, - "license": "MIT", "dependencies": { "pkg-dir": "^4.2.0", "resolve-cwd": "^3.0.0" @@ -9929,28 +11402,32 @@ }, "node_modules/imurmurhash": { "version": "0.1.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "engines": { "node": ">=0.8.19" } }, "node_modules/indent-string": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": 
"sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", "engines": { "node": ">=8" } }, "node_modules/infima": { "version": "0.2.0-alpha.37", - "license": "MIT", + "resolved": "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.37.tgz", + "integrity": "sha512-4GX7Baw+/lwS4PPW/UJNY89tWSvYG1DL6baKVdpK6mC593iRgMssxNtORMTFArLPJ/A/lzsGhRmx+z6MaMxj0Q==", "engines": { "node": ">=12" } }, "node_modules/inflight": { "version": "1.0.6", - "license": "ISC", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "dependencies": { "once": "^1.3.0", "wrappy": "1" @@ -9958,19 +11435,26 @@ }, "node_modules/inherits": { "version": "2.0.4", - "license": "ISC" + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "node_modules/ini": { - "version": "1.3.8", - "license": "ISC" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", + "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", + "engines": { + "node": ">=10" + } }, "node_modules/inline-style-parser": { "version": "0.1.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", + "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==" }, "node_modules/internal-slot": { "version": "1.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", + "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", "dependencies": { "get-intrinsic": "^1.1.0", "has": "^1.0.3", @@ -9982,28 +11466,32 @@ }, "node_modules/interpret": { "version": "1.4.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", "engines": { "node": ">= 0.10" } }, "node_modules/invariant": { "version": "2.2.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", + "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", "dependencies": { "loose-envify": "^1.0.0" } }, "node_modules/ipaddr.js": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.0.1.tgz", + "integrity": "sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng==", "engines": { "node": ">= 10" } }, "node_modules/is-alphabetical": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", + "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -10011,7 +11499,8 @@ }, "node_modules/is-alphanumerical": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", + "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", "dependencies": { 
"is-alphabetical": "^1.0.0", "is-decimal": "^1.0.0" @@ -10023,7 +11512,8 @@ }, "node_modules/is-arguments": { "version": "1.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -10037,11 +11527,13 @@ }, "node_modules/is-arrayish": { "version": "0.2.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==" }, "node_modules/is-bigint": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", "dependencies": { "has-bigints": "^1.0.1" }, @@ -10051,7 +11543,8 @@ }, "node_modules/is-binary-path": { "version": "2.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", "dependencies": { "binary-extensions": "^2.0.0" }, @@ -10061,7 +11554,8 @@ }, "node_modules/is-boolean-object": { "version": "1.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -10075,6 +11569,8 @@ }, "node_modules/is-buffer": { "version": "2.0.5", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", + "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", "funding": [ { "type": "github", @@ -10089,14 +11585,14 @@ "url": "https://feross.org/support" } ], - "license": "MIT", "engines": { "node": ">=4" } }, "node_modules/is-callable": { - "version": "1.2.4", - "license": "MIT", + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", "engines": { "node": ">= 0.4" }, @@ -10105,22 +11601,21 @@ } }, "node_modules/is-ci": { - "version": "2.0.0", - "license": "MIT", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", + "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", + "dev": true, "dependencies": { - "ci-info": "^2.0.0" + "ci-info": "^3.2.0" }, "bin": { "is-ci": "bin.js" } }, - "node_modules/is-ci/node_modules/ci-info": { - "version": "2.0.0", - "license": "MIT" - }, "node_modules/is-core-module": { - "version": "2.9.0", - "license": "MIT", + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.10.0.tgz", + "integrity": "sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==", "dependencies": { "has": "^1.0.3" }, @@ -10130,7 +11625,8 @@ }, "node_modules/is-date-object": { "version": "1.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": 
"sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -10143,7 +11639,8 @@ }, "node_modules/is-decimal": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", + "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -10151,7 +11648,8 @@ }, "node_modules/is-docker": { "version": "2.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", "bin": { "is-docker": "cli.js" }, @@ -10164,40 +11662,46 @@ }, "node_modules/is-extendable": { "version": "0.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", "engines": { "node": ">=0.10.0" } }, "node_modules/is-extglob": { "version": "2.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "engines": { "node": ">=0.10.0" } }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "engines": { "node": ">=8" } }, "node_modules/is-function": { "version": "1.0.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/is-function/-/is-function-1.0.2.tgz", + "integrity": "sha512-lw7DUp0aWXYg+CBCN+JKkcE0Q2RayZnSvnZBlwgxHBQhqt5pZNVy4Ri7H9GmmXkdu7LUthszM+Tor1u/2iBcpQ==" }, "node_modules/is-generator-fn": { "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/is-generator-function": { "version": "1.0.10", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -10210,7 +11714,8 @@ }, "node_modules/is-glob": { "version": "4.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dependencies": { "is-extglob": "^2.1.1" }, @@ -10220,7 +11725,8 @@ }, "node_modules/is-hexadecimal": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", + "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -10228,7 +11734,8 @@ }, "node_modules/is-installed-globally": { "version": "0.4.0", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", + "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", "dependencies": { "global-dirs": "^3.0.0", "is-path-inside": "^3.0.2" @@ -10242,7 +11749,8 @@ }, "node_modules/is-nan": { "version": "1.3.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-nan/-/is-nan-1.3.2.tgz", + "integrity": "sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==", "dependencies": { "call-bind": "^1.0.0", "define-properties": "^1.1.3" @@ -10256,7 +11764,8 @@ }, "node_modules/is-negative-zero": { "version": "2.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", "engines": { "node": ">= 0.4" }, @@ -10266,7 +11775,8 @@ }, "node_modules/is-npm": { "version": "5.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz", + "integrity": "sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA==", "engines": { "node": ">=10" }, @@ -10276,14 +11786,16 @@ }, "node_modules/is-number": { "version": "7.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "engines": { "node": ">=0.12.0" } }, "node_modules/is-number-object": { "version": "1.0.7", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -10296,35 +11808,40 @@ }, "node_modules/is-obj": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", + "integrity": "sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==", "engines": { "node": ">=0.10.0" } }, "node_modules/is-path-cwd": { "version": "2.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", + "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==", "engines": { "node": ">=6" } }, "node_modules/is-path-inside": { "version": "3.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "engines": { "node": ">=8" } }, "node_modules/is-plain-obj": { "version": "2.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", "engines": { "node": ">=8" } }, "node_modules/is-plain-object": { "version": "2.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", "dependencies": { "isobject": "^3.0.1" }, @@ -10334,12 +11851,14 @@ }, "node_modules/is-potential-custom-element-name": { "version": "1.0.1", - 
"dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", + "dev": true }, "node_modules/is-regex": { "version": "1.1.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -10353,21 +11872,24 @@ }, "node_modules/is-regexp": { "version": "1.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", + "integrity": "sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==", "engines": { "node": ">=0.10.0" } }, "node_modules/is-root": { "version": "2.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz", + "integrity": "sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==", "engines": { "node": ">=6" } }, "node_modules/is-shared-array-buffer": { "version": "1.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", "dependencies": { "call-bind": "^1.0.2" }, @@ -10377,7 +11899,8 @@ }, "node_modules/is-stream": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "engines": { "node": ">=8" }, @@ -10387,7 +11910,8 @@ }, "node_modules/is-string": { "version": "1.0.7", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -10400,7 +11924,8 @@ }, "node_modules/is-symbol": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "dependencies": { "has-symbols": "^1.0.2" }, @@ -10412,13 +11937,14 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.8", - "license": "MIT", + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.9.tgz", + "integrity": "sha512-kfrlnTTn8pZkfpJMUgYD7YZ3qzeJgWUn8XfVYBARc4wnmNOmLbmuuaAs3q5fvB0UJOn6yHAKaGTPM7d6ezoD/A==", "dependencies": { "available-typed-arrays": "^1.0.5", "call-bind": "^1.0.2", - "es-abstract": "^1.18.5", - "foreach": "^2.0.5", + "es-abstract": "^1.20.0", + "for-each": "^0.3.3", "has-tostringtag": "^1.0.0" }, "engines": { @@ -10430,11 +11956,25 @@ }, "node_modules/is-typedarray": { "version": "1.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + 
"integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/is-weakref": { "version": "1.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", "dependencies": { "call-bind": "^1.0.2" }, @@ -10444,7 +11984,8 @@ }, "node_modules/is-whitespace-character": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz", + "integrity": "sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -10452,7 +11993,8 @@ }, "node_modules/is-word-character": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz", + "integrity": "sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -10460,7 +12002,8 @@ }, "node_modules/is-wsl": { "version": "2.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", "dependencies": { "is-docker": "^2.0.0" }, @@ -10470,35 +12013,47 @@ }, "node_modules/is-yarn-global": { "version": "0.3.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz", + "integrity": "sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==" }, "node_modules/isarray": { "version": "0.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" }, "node_modules/isexe": { "version": "2.0.0", - "license": "ISC" + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, "node_modules/isobject": { "version": "3.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", "engines": { "node": ">=0.10.0" } }, + "node_modules/isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==", + "dev": true + }, "node_modules/istanbul-lib-coverage": { "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", + "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", "dev": true, - "license": "BSD-3-Clause", "engines": { "node": ">=8" } }, "node_modules/istanbul-lib-instrument": { - "version": "5.2.0", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": 
"sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { "@babel/core": "^7.12.3", "@babel/parser": "^7.14.7", @@ -10512,16 +12067,18 @@ }, "node_modules/istanbul-lib-instrument/node_modules/semver": { "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true, - "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/istanbul-lib-report": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { "istanbul-lib-coverage": "^3.0.0", "make-dir": "^3.0.0", @@ -10533,16 +12090,18 @@ }, "node_modules/istanbul-lib-report/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/istanbul-lib-report/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -10552,8 +12111,9 @@ }, "node_modules/istanbul-lib-source-maps": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { "debug": "^4.1.1", "istanbul-lib-coverage": "^3.0.0", @@ -10564,9 +12124,10 @@ } }, "node_modules/istanbul-reports": { - "version": "3.1.4", + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz", + "integrity": "sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { "html-escaper": "^2.0.0", "istanbul-lib-report": "^3.0.0" @@ -10577,8 +12138,9 @@ }, "node_modules/jest": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest/-/jest-27.5.1.tgz", + "integrity": "sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ==", "dev": true, - "license": "MIT", "dependencies": { "@jest/core": "^27.5.1", "import-local": "^3.0.2", @@ -10601,8 +12163,9 @@ }, "node_modules/jest-changed-files": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-27.5.1.tgz", + "integrity": "sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw==", "dev": true, - "license": "MIT", "dependencies": { "@jest/types": "^27.5.1", "execa": "^5.0.0", @@ -10612,39 +12175,85 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-circus": { - "version": "27.5.1", + "node_modules/jest-changed-files/node_modules/execa": { + "version": "5.1.1", + "resolved": 
"https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "dev": true, - "license": "MIT", "dependencies": { - "@jest/environment": "^27.5.1", - "@jest/test-result": "^27.5.1", - "@jest/types": "^27.5.1", - "@types/node": "*", - "chalk": "^4.0.0", - "co": "^4.6.0", - "dedent": "^0.7.0", - "expect": "^27.5.1", - "is-generator-fn": "^2.0.0", - "jest-each": "^27.5.1", - "jest-matcher-utils": "^27.5.1", - "jest-message-util": "^27.5.1", - "jest-runtime": "^27.5.1", - "jest-snapshot": "^27.5.1", - "jest-util": "^27.5.1", - "pretty-format": "^27.5.1", - "slash": "^3.0.0", - "stack-utils": "^2.0.3", - "throat": "^6.0.1" - }, - "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/jest-changed-files/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-changed-files/node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/jest-circus": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-27.5.1.tgz", + "integrity": "sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw==", + "dev": true, + "dependencies": { + "@jest/environment": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^0.7.0", + "expect": "^27.5.1", + "is-generator-fn": "^2.0.0", + "jest-each": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3", + "throat": "^6.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/jest-circus/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -10657,8 +12266,9 @@ }, "node_modules/jest-circus/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -10672,8 
+12282,9 @@ }, "node_modules/jest-circus/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -10683,21 +12294,124 @@ }, "node_modules/jest-circus/node_modules/color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-circus/node_modules/diff-sequences": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": "sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-circus/node_modules/expect": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", "dev": true, - "license": "MIT" + "dependencies": { + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } }, "node_modules/jest-circus/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/jest-circus/node_modules/jest-diff": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-circus/node_modules/jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-circus/node_modules/jest-matcher-utils": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-circus/node_modules/jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": 
"sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-circus/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, "node_modules/jest-circus/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -10707,8 +12421,9 @@ }, "node_modules/jest-cli": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-27.5.1.tgz", + "integrity": "sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw==", "dev": true, - "license": "MIT", "dependencies": { "@jest/core": "^27.5.1", "@jest/test-result": "^27.5.1", @@ -10740,8 +12455,9 @@ }, "node_modules/jest-cli/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -10754,8 +12470,9 @@ }, "node_modules/jest-cli/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -10769,8 +12486,9 @@ }, "node_modules/jest-cli/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -10780,21 +12498,41 @@ }, "node_modules/jest-cli/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/jest-cli/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, + 
"node_modules/jest-cli/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, "node_modules/jest-cli/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -10804,8 +12542,9 @@ }, "node_modules/jest-config": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-27.5.1.tgz", + "integrity": "sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA==", "dev": true, - "license": "MIT", "dependencies": { "@babel/core": "^7.8.0", "@jest/test-sequencer": "^27.5.1", @@ -10846,8 +12585,9 @@ }, "node_modules/jest-config/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -10860,8 +12600,9 @@ }, "node_modules/jest-config/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -10875,8 +12616,9 @@ }, "node_modules/jest-config/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -10886,21 +12628,50 @@ }, "node_modules/jest-config/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/jest-config/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/jest-config/node_modules/jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-config/node_modules/jest-util": { + "version": "27.5.1", + "resolved": 
"https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, "node_modules/jest-config/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -10909,23 +12680,25 @@ } }, "node_modules/jest-diff": { - "version": "27.5.1", + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.2.1.tgz", + "integrity": "sha512-gfh/SMNlQmP3MOUgdzxPOd4XETDJifADpT937fN1iUGz+9DgOu2eUPHH25JDkLVcLwwqxv3GzVyK4VBUr9fjfA==", "dev": true, - "license": "MIT", "dependencies": { "chalk": "^4.0.0", - "diff-sequences": "^27.5.1", - "jest-get-type": "^27.5.1", - "pretty-format": "^27.5.1" + "diff-sequences": "^29.2.0", + "jest-get-type": "^29.2.0", + "pretty-format": "^29.2.1" }, "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, "node_modules/jest-diff/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -10938,8 +12711,9 @@ }, "node_modules/jest-diff/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -10953,8 +12727,9 @@ }, "node_modules/jest-diff/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -10964,21 +12739,50 @@ }, "node_modules/jest-diff/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/jest-diff/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/jest-diff/node_modules/pretty-format": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.2.1.tgz", + "integrity": "sha512-Y41Sa4aLCtKAXvwuIpTvcFBkyeYp2gdFWzXGA+ZNES3VwURIB165XO/z7CjETwzCCS53MjW/rLMyyqEnTtaOfA==", + "dev": true, + "dependencies": { + 
"@jest/schemas": "^29.0.0", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-diff/node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, "node_modules/jest-diff/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -10988,8 +12792,9 @@ }, "node_modules/jest-docblock": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-27.5.1.tgz", + "integrity": "sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ==", "dev": true, - "license": "MIT", "dependencies": { "detect-newline": "^3.0.0" }, @@ -10999,8 +12804,9 @@ }, "node_modules/jest-each": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-27.5.1.tgz", + "integrity": "sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ==", "dev": true, - "license": "MIT", "dependencies": { "@jest/types": "^27.5.1", "chalk": "^4.0.0", @@ -11014,8 +12820,9 @@ }, "node_modules/jest-each/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -11028,8 +12835,9 @@ }, "node_modules/jest-each/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -11043,8 +12851,9 @@ }, "node_modules/jest-each/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -11054,21 +12863,50 @@ }, "node_modules/jest-each/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/jest-each/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/jest-each/node_modules/jest-get-type": { + "version": "27.5.1", + "resolved": 
"https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-each/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, "node_modules/jest-each/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -11078,8 +12916,9 @@ }, "node_modules/jest-environment-jsdom": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz", + "integrity": "sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw==", "dev": true, - "license": "MIT", "dependencies": { "@jest/environment": "^27.5.1", "@jest/fake-timers": "^27.5.1", @@ -11093,86 +12932,115 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-environment-node": { - "version": "27.5.1", + "node_modules/jest-environment-jsdom/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { - "@jest/environment": "^27.5.1", - "@jest/fake-timers": "^27.5.1", - "@jest/types": "^27.5.1", - "@types/node": "*", - "jest-mock": "^27.5.1", - "jest-util": "^27.5.1" + "color-convert": "^2.0.1" }, "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-get-type": { - "version": "27.5.1", + "node_modules/jest-environment-jsdom/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-haste-map": { - "version": "27.5.1", + "node_modules/jest-environment-jsdom/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { - "@jest/types": "^27.5.1", - "@types/graceful-fs": "^4.1.2", - "@types/node": "*", - "anymatch": "^3.0.3", - 
"fb-watchman": "^2.0.0", - "graceful-fs": "^4.2.9", - "jest-regex-util": "^27.5.1", - "jest-serializer": "^27.5.1", - "jest-util": "^27.5.1", - "jest-worker": "^27.5.1", - "micromatch": "^4.0.4", - "walker": "^1.0.7" + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-environment-jsdom/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-environment-jsdom/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-environment-jsdom/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-environment-jsdom/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" }, - "optionalDependencies": { - "fsevents": "^2.3.2" + "engines": { + "node": ">=8" } }, - "node_modules/jest-jasmine2": { + "node_modules/jest-environment-node": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-27.5.1.tgz", + "integrity": "sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw==", "dev": true, - "license": "MIT", "dependencies": { "@jest/environment": "^27.5.1", - "@jest/source-map": "^27.5.1", - "@jest/test-result": "^27.5.1", + "@jest/fake-timers": "^27.5.1", "@jest/types": "^27.5.1", "@types/node": "*", - "chalk": "^4.0.0", - "co": "^4.6.0", - "expect": "^27.5.1", - "is-generator-fn": "^2.0.0", - "jest-each": "^27.5.1", - "jest-matcher-utils": "^27.5.1", - "jest-message-util": "^27.5.1", - "jest-runtime": "^27.5.1", - "jest-snapshot": "^27.5.1", - "jest-util": "^27.5.1", - "pretty-format": "^27.5.1", - "throat": "^6.0.1" + "jest-mock": "^27.5.1", + "jest-util": "^27.5.1" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-jasmine2/node_modules/ansi-styles": { + "node_modules/jest-environment-node/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -11183,10 +13051,11 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-jasmine2/node_modules/chalk": { + "node_modules/jest-environment-node/node_modules/chalk": { "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -11198,10 +13067,11 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-jasmine2/node_modules/color-convert": { + "node_modules/jest-environment-node/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -11209,23 +13079,43 @@ "node": ">=7.0.0" } }, - "node_modules/jest-jasmine2/node_modules/color-name": { + "node_modules/jest-environment-node/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, - "node_modules/jest-jasmine2/node_modules/has-flag": { + "node_modules/jest-environment-node/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/jest-jasmine2/node_modules/supports-color": { + "node_modules/jest-environment-node/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-environment-node/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -11233,36 +13123,46 @@ "node": ">=8" } }, - "node_modules/jest-leak-detector": { - "version": "27.5.1", + "node_modules/jest-get-type": { + "version": "29.2.0", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.2.0.tgz", + "integrity": "sha512-uXNJlg8hKFEnDgFsrCjznB+sTxdkuqiCL6zMgA75qEbAJjJYTs9XPrvDctrEig2GDow22T/LvHgO57iJhXB/UA==", "dev": true, - "license": "MIT", - "dependencies": { - "jest-get-type": "^27.5.1", - "pretty-format": "^27.5.1" - }, "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/jest-matcher-utils": { + "node_modules/jest-haste-map": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.5.1.tgz", + "integrity": "sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng==", "dev": true, - "license": "MIT", "dependencies": { - "chalk": "^4.0.0", - 
"jest-diff": "^27.5.1", - "jest-get-type": "^27.5.1", - "pretty-format": "^27.5.1" + "@jest/types": "^27.5.1", + "@types/graceful-fs": "^4.1.2", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^27.5.1", + "jest-serializer": "^27.5.1", + "jest-util": "^27.5.1", + "jest-worker": "^27.5.1", + "micromatch": "^4.0.4", + "walker": "^1.0.7" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" } }, - "node_modules/jest-matcher-utils/node_modules/ansi-styles": { + "node_modules/jest-haste-map/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -11273,10 +13173,11 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-matcher-utils/node_modules/chalk": { + "node_modules/jest-haste-map/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -11288,10 +13189,11 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-matcher-utils/node_modules/color-convert": { + "node_modules/jest-haste-map/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -11299,23 +13201,43 @@ "node": ">=7.0.0" } }, - "node_modules/jest-matcher-utils/node_modules/color-name": { + "node_modules/jest-haste-map/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, - "node_modules/jest-matcher-utils/node_modules/has-flag": { + "node_modules/jest-haste-map/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/jest-matcher-utils/node_modules/supports-color": { + "node_modules/jest-haste-map/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-haste-map/node_modules/supports-color": { "version": "7.2.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -11323,29 +13245,39 @@ "node": ">=8" } }, - "node_modules/jest-message-util": { + "node_modules/jest-jasmine2": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz", + "integrity": "sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.12.13", + "@jest/environment": "^27.5.1", + "@jest/source-map": "^27.5.1", + "@jest/test-result": "^27.5.1", "@jest/types": "^27.5.1", - "@types/stack-utils": "^2.0.0", + "@types/node": "*", "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "micromatch": "^4.0.4", + "co": "^4.6.0", + "expect": "^27.5.1", + "is-generator-fn": "^2.0.0", + "jest-each": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", "pretty-format": "^27.5.1", - "slash": "^3.0.0", - "stack-utils": "^2.0.3" + "throat": "^6.0.1" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-message-util/node_modules/ansi-styles": { + "node_modules/jest-jasmine2/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -11356,10 +13288,11 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-message-util/node_modules/chalk": { + "node_modules/jest-jasmine2/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -11371,10 +13304,11 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-message-util/node_modules/color-convert": { + "node_modules/jest-jasmine2/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -11382,198 +13316,175 @@ "node": ">=7.0.0" } }, - "node_modules/jest-message-util/node_modules/color-name": { + "node_modules/jest-jasmine2/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, - "node_modules/jest-message-util/node_modules/has-flag": { - "version": "4.0.0", + "node_modules/jest-jasmine2/node_modules/diff-sequences": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": 
"sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", "dev": true, - "license": "MIT", "engines": { - "node": ">=8" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-message-util/node_modules/supports-color": { - "version": "7.2.0", + "node_modules/jest-jasmine2/node_modules/expect": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", "dev": true, - "license": "MIT", "dependencies": { - "has-flag": "^4.0.0" + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-jasmine2/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, "engines": { "node": ">=8" } }, - "node_modules/jest-mock": { + "node_modules/jest-jasmine2/node_modules/jest-diff": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", "dev": true, - "license": "MIT", "dependencies": { - "@jest/types": "^27.5.1", - "@types/node": "*" + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-pnp-resolver": { - "version": "1.2.2", + "node_modules/jest-jasmine2/node_modules/jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", "dev": true, - "license": "MIT", "engines": { - "node": ">=6" - }, - "peerDependencies": { - "jest-resolve": "*" - }, - "peerDependenciesMeta": { - "jest-resolve": { - "optional": true - } + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-regex-util": { + "node_modules/jest-jasmine2/node_modules/jest-matcher-utils": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", "dev": true, - "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-resolve": { + "node_modules/jest-jasmine2/node_modules/jest-message-util": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", "dev": true, - "license": "MIT", "dependencies": { + "@babel/code-frame": "^7.12.13", "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", "chalk": "^4.0.0", "graceful-fs": "^4.2.9", - "jest-haste-map": "^27.5.1", - "jest-pnp-resolver": "^1.2.2", - "jest-util": "^27.5.1", - "jest-validate": "^27.5.1", 
- "resolve": "^1.20.0", - "resolve.exports": "^1.1.0", - "slash": "^3.0.0" + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-resolve-dependencies": { + "node_modules/jest-jasmine2/node_modules/jest-util": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", "dev": true, - "license": "MIT", "dependencies": { "@jest/types": "^27.5.1", - "jest-regex-util": "^27.5.1", - "jest-snapshot": "^27.5.1" + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-resolve/node_modules/ansi-styles": { - "version": "4.3.0", + "node_modules/jest-jasmine2/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { - "color-convert": "^2.0.1" + "has-flag": "^4.0.0" }, "engines": { "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-resolve/node_modules/chalk": { - "version": "4.1.2", + "node_modules/jest-leak-detector": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz", + "integrity": "sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ==", "dev": true, - "license": "MIT", "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-resolve/node_modules/color-convert": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/jest-resolve/node_modules/color-name": { - "version": "1.1.4", - "dev": true, - "license": "MIT" - }, - "node_modules/jest-resolve/node_modules/has-flag": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-resolve/node_modules/supports-color": { - "version": "7.2.0", + "node_modules/jest-leak-detector/node_modules/jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, "engines": { - "node": ">=8" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-runner": { - "version": "27.5.1", + "node_modules/jest-matcher-utils": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.2.1.tgz", + "integrity": "sha512-hUTBh7H/Mnb6GTpihbLh8uF5rjAMdekfW/oZNXUMAXi7bbmym2HiRpzgqf/zzkjgejMrVAkPdVSQj+32enlUww==", "dev": true, - "license": "MIT", 
"dependencies": { - "@jest/console": "^27.5.1", - "@jest/environment": "^27.5.1", - "@jest/test-result": "^27.5.1", - "@jest/transform": "^27.5.1", - "@jest/types": "^27.5.1", - "@types/node": "*", "chalk": "^4.0.0", - "emittery": "^0.8.1", - "graceful-fs": "^4.2.9", - "jest-docblock": "^27.5.1", - "jest-environment-jsdom": "^27.5.1", - "jest-environment-node": "^27.5.1", - "jest-haste-map": "^27.5.1", - "jest-leak-detector": "^27.5.1", - "jest-message-util": "^27.5.1", - "jest-resolve": "^27.5.1", - "jest-runtime": "^27.5.1", - "jest-util": "^27.5.1", - "jest-worker": "^27.5.1", - "source-map-support": "^0.5.6", - "throat": "^6.0.1" + "jest-diff": "^29.2.1", + "jest-get-type": "^29.2.0", + "pretty-format": "^29.2.1" }, "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/jest-runner/node_modules/ansi-styles": { + "node_modules/jest-matcher-utils/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -11584,10 +13495,11 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-runner/node_modules/chalk": { + "node_modules/jest-matcher-utils/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -11599,10 +13511,11 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-runner/node_modules/color-convert": { + "node_modules/jest-matcher-utils/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -11610,23 +13523,52 @@ "node": ">=7.0.0" } }, - "node_modules/jest-runner/node_modules/color-name": { + "node_modules/jest-matcher-utils/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, - "node_modules/jest-runner/node_modules/has-flag": { + "node_modules/jest-matcher-utils/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/jest-runner/node_modules/supports-color": { + "node_modules/jest-matcher-utils/node_modules/pretty-format": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.2.1.tgz", + "integrity": "sha512-Y41Sa4aLCtKAXvwuIpTvcFBkyeYp2gdFWzXGA+ZNES3VwURIB165XO/z7CjETwzCCS53MjW/rLMyyqEnTtaOfA==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.0.0", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, 
+ "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils/node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-matcher-utils/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -11634,42 +13576,57 @@ "node": ">=8" } }, - "node_modules/jest-runtime": { - "version": "27.5.1", + "node_modules/jest-message-util": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.2.1.tgz", + "integrity": "sha512-Dx5nEjw9V8C1/Yj10S/8ivA8F439VS8vTq1L7hEgwHFn9ovSKNpYW/kwNh7UglaEgXO42XxzKJB+2x0nSglFVw==", "dev": true, - "license": "MIT", "dependencies": { - "@jest/environment": "^27.5.1", - "@jest/fake-timers": "^27.5.1", - "@jest/globals": "^27.5.1", - "@jest/source-map": "^27.5.1", - "@jest/test-result": "^27.5.1", - "@jest/transform": "^27.5.1", - "@jest/types": "^27.5.1", + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.2.1", + "@types/stack-utils": "^2.0.0", "chalk": "^4.0.0", - "cjs-module-lexer": "^1.0.0", - "collect-v8-coverage": "^1.0.0", - "execa": "^5.0.0", - "glob": "^7.1.3", "graceful-fs": "^4.2.9", - "jest-haste-map": "^27.5.1", - "jest-message-util": "^27.5.1", - "jest-mock": "^27.5.1", - "jest-regex-util": "^27.5.1", - "jest-resolve": "^27.5.1", - "jest-snapshot": "^27.5.1", - "jest-util": "^27.5.1", + "micromatch": "^4.0.4", + "pretty-format": "^29.2.1", "slash": "^3.0.0", - "strip-bom": "^4.0.0" + "stack-utils": "^2.0.3" }, "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - "node_modules/jest-runtime/node_modules/ansi-styles": { + "node_modules/jest-message-util/node_modules/@jest/types": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.2.1.tgz", + "integrity": "sha512-O/QNDQODLnINEPAI0cl9U6zUIDXEWXt6IC1o2N2QENuos7hlGUIthlKyV4p6ki3TvXFX071blj8HUhgLGquPjw==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.0.0", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util/node_modules/@types/yargs": { + "version": "17.0.13", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.13.tgz", + "integrity": "sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/jest-message-util/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" 
}, @@ -11680,10 +13637,11 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-runtime/node_modules/chalk": { + "node_modules/jest-message-util/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -11695,10 +13653,11 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-runtime/node_modules/color-convert": { + "node_modules/jest-message-util/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -11706,23 +13665,52 @@ "node": ">=7.0.0" } }, - "node_modules/jest-runtime/node_modules/color-name": { + "node_modules/jest-message-util/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, - "node_modules/jest-runtime/node_modules/has-flag": { + "node_modules/jest-message-util/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/jest-runtime/node_modules/supports-color": { + "node_modules/jest-message-util/node_modules/pretty-format": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.2.1.tgz", + "integrity": "sha512-Y41Sa4aLCtKAXvwuIpTvcFBkyeYp2gdFWzXGA+ZNES3VwURIB165XO/z7CjETwzCCS53MjW/rLMyyqEnTtaOfA==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.0.0", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util/node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-message-util/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -11730,54 +13718,85 @@ "node": ">=8" } }, - "node_modules/jest-serializer": { + "node_modules/jest-mock": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-27.5.1.tgz", + "integrity": "sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og==", "dev": true, - "license": "MIT", "dependencies": { - "@types/node": 
"*", - "graceful-fs": "^4.2.9" + "@jest/types": "^27.5.1", + "@types/node": "*" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-snapshot": { + "node_modules/jest-pnp-resolver": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz", + "integrity": "sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==", + "dev": true, + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.5.1.tgz", + "integrity": "sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-resolve": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-27.5.1.tgz", + "integrity": "sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/core": "^7.7.2", - "@babel/generator": "^7.7.2", - "@babel/plugin-syntax-typescript": "^7.7.2", - "@babel/traverse": "^7.7.2", - "@babel/types": "^7.0.0", - "@jest/transform": "^27.5.1", "@jest/types": "^27.5.1", - "@types/babel__traverse": "^7.0.4", - "@types/prettier": "^2.1.5", - "babel-preset-current-node-syntax": "^1.0.0", "chalk": "^4.0.0", - "expect": "^27.5.1", "graceful-fs": "^4.2.9", - "jest-diff": "^27.5.1", - "jest-get-type": "^27.5.1", "jest-haste-map": "^27.5.1", - "jest-matcher-utils": "^27.5.1", - "jest-message-util": "^27.5.1", + "jest-pnp-resolver": "^1.2.2", "jest-util": "^27.5.1", - "natural-compare": "^1.4.0", - "pretty-format": "^27.5.1", - "semver": "^7.3.2" + "jest-validate": "^27.5.1", + "resolve": "^1.20.0", + "resolve.exports": "^1.1.0", + "slash": "^3.0.0" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-snapshot/node_modules/ansi-styles": { + "node_modules/jest-resolve-dependencies": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz", + "integrity": "sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-snapshot": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-resolve/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -11788,10 +13807,11 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-snapshot/node_modules/chalk": { + "node_modules/jest-resolve/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": 
"^4.1.0", "supports-color": "^7.1.0" @@ -11803,10 +13823,11 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-snapshot/node_modules/color-convert": { + "node_modules/jest-resolve/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -11814,34 +13835,26 @@ "node": ">=7.0.0" } }, - "node_modules/jest-snapshot/node_modules/color-name": { + "node_modules/jest-resolve/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, - "node_modules/jest-snapshot/node_modules/has-flag": { + "node_modules/jest-resolve/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-snapshot/node_modules/supports-color": { - "version": "7.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, "engines": { "node": ">=8" } }, - "node_modules/jest-util": { + "node_modules/jest-resolve/node_modules/jest-util": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", "dev": true, - "license": "MIT", "dependencies": { "@jest/types": "^27.5.1", "@types/node": "*", @@ -11854,27 +13867,73 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-util/node_modules/ansi-styles": { - "version": "4.3.0", + "node_modules/jest-resolve/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { - "color-convert": "^2.0.1" + "has-flag": "^4.0.0" }, "engines": { "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-util/node_modules/chalk": { - "version": "4.1.2", + "node_modules/jest-runner": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-27.5.1.tgz", + "integrity": "sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ==", "dev": true, - "license": "MIT", "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "@jest/console": "^27.5.1", + "@jest/environment": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.8.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^27.5.1", + "jest-environment-jsdom": "^27.5.1", + "jest-environment-node": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-leak-detector": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-runtime": "^27.5.1", + 
"jest-util": "^27.5.1", + "jest-worker": "^27.5.1", + "source-map-support": "^0.5.6", + "throat": "^6.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-runner/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-runner/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" }, "engines": { "node": ">=10" @@ -11883,10 +13942,11 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-util/node_modules/color-convert": { + "node_modules/jest-runner/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -11894,23 +13954,63 @@ "node": ">=7.0.0" } }, - "node_modules/jest-util/node_modules/color-name": { + "node_modules/jest-runner/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, - "node_modules/jest-util/node_modules/has-flag": { + "node_modules/jest-runner/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/jest-util/node_modules/supports-color": { + "node_modules/jest-runner/node_modules/jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-runner/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 
|| >=15.0.0" + } + }, + "node_modules/jest-runner/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -11918,26 +14018,44 @@ "node": ">=8" } }, - "node_modules/jest-validate": { + "node_modules/jest-runtime": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-27.5.1.tgz", + "integrity": "sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A==", "dev": true, - "license": "MIT", "dependencies": { + "@jest/environment": "^27.5.1", + "@jest/fake-timers": "^27.5.1", + "@jest/globals": "^27.5.1", + "@jest/source-map": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", "@jest/types": "^27.5.1", - "camelcase": "^6.2.0", "chalk": "^4.0.0", - "jest-get-type": "^27.5.1", - "leven": "^3.1.0", - "pretty-format": "^27.5.1" + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "execa": "^5.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-mock": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-validate/node_modules/ansi-styles": { + "node_modules/jest-runtime/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -11948,10 +14066,11 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-validate/node_modules/chalk": { + "node_modules/jest-runtime/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -11963,10 +14082,11 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-validate/node_modules/color-convert": { + "node_modules/jest-runtime/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -11974,23 +14094,107 @@ "node": ">=7.0.0" } }, - "node_modules/jest-validate/node_modules/color-name": { + "node_modules/jest-runtime/node_modules/color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-runtime/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": 
"sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/jest-runtime/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", "dev": true, - "license": "MIT" + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, - "node_modules/jest-validate/node_modules/has-flag": { + "node_modules/jest-runtime/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/jest-validate/node_modules/supports-color": { + "node_modules/jest-runtime/node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/jest-runtime/node_modules/jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-runtime/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-runtime/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -11998,27 +14202,57 @@ "node": ">=8" } }, - "node_modules/jest-watcher": { + "node_modules/jest-serializer": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-27.5.1.tgz", + "integrity": 
"sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w==", "dev": true, - "license": "MIT", "dependencies": { - "@jest/test-result": "^27.5.1", - "@jest/types": "^27.5.1", "@types/node": "*", - "ansi-escapes": "^4.2.1", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-27.5.1.tgz", + "integrity": "sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA==", + "dev": true, + "dependencies": { + "@babel/core": "^7.7.2", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/traverse": "^7.7.2", + "@babel/types": "^7.0.0", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/babel__traverse": "^7.0.4", + "@types/prettier": "^2.1.5", + "babel-preset-current-node-syntax": "^1.0.0", "chalk": "^4.0.0", + "expect": "^27.5.1", + "graceful-fs": "^4.2.9", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", "jest-util": "^27.5.1", - "string-length": "^4.0.1" + "natural-compare": "^1.4.0", + "pretty-format": "^27.5.1", + "semver": "^7.3.2" }, "engines": { "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-watcher/node_modules/ansi-styles": { + "node_modules/jest-snapshot/node_modules/ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -12029,10 +14263,11 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-watcher/node_modules/chalk": { + "node_modules/jest-snapshot/node_modules/chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -12044,10 +14279,11 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-watcher/node_modules/color-convert": { + "node_modules/jest-snapshot/node_modules/color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, - "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -12055,25 +14291,442 @@ "node": ">=7.0.0" } }, - "node_modules/jest-watcher/node_modules/color-name": { + "node_modules/jest-snapshot/node_modules/color-name": { "version": "1.1.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, - "node_modules/jest-watcher/node_modules/has-flag": { - "version": "4.0.0", + "node_modules/jest-snapshot/node_modules/diff-sequences": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": 
"sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", "dev": true, - "license": "MIT", "engines": { - "node": ">=8" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-watcher/node_modules/supports-color": { - "version": "7.2.0", + "node_modules/jest-snapshot/node_modules/expect": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", "dev": true, - "license": "MIT", "dependencies": { - "has-flag": "^4.0.0" + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-snapshot/node_modules/jest-diff": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/jest-matcher-utils": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + 
"@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-util": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.2.1.tgz", + "integrity": "sha512-P5VWDj25r7kj7kl4pN2rG/RN2c1TLfYYYZYULnS/35nFDjBai+hBeo3MDrYZS7p6IoY3YHZnt2vq4L6mKnLk0g==", + "dev": true, + "dependencies": { + "@jest/types": "^29.2.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-util/node_modules/@jest/types": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.2.1.tgz", + "integrity": "sha512-O/QNDQODLnINEPAI0cl9U6zUIDXEWXt6IC1o2N2QENuos7hlGUIthlKyV4p6ki3TvXFX071blj8HUhgLGquPjw==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.0.0", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-util/node_modules/@types/yargs": { + "version": "17.0.13", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.13.tgz", + "integrity": "sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/jest-util/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-util/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-util/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-util/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-util/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-util/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-validate": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-27.5.1.tgz", + "integrity": "sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^27.5.1", + "leven": "^3.1.0", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-validate/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-validate/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-validate/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-validate/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-validate/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-validate/node_modules/jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": 
"sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-validate/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-watcher": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-27.5.1.tgz", + "integrity": "sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw==", + "dev": true, + "dependencies": { + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "jest-util": "^27.5.1", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-watcher/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-watcher/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-watcher/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-watcher/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-watcher/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-watcher/node_modules/jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": 
"^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-watcher/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" }, "engines": { "node": ">=8" @@ -12081,7 +14734,8 @@ }, "node_modules/jest-worker": { "version": "27.5.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", "dependencies": { "@types/node": "*", "merge-stream": "^2.0.0", @@ -12093,14 +14747,16 @@ }, "node_modules/jest-worker/node_modules/has-flag": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "engines": { "node": ">=8" } }, "node_modules/jest-worker/node_modules/supports-color": { "version": "8.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dependencies": { "has-flag": "^4.0.0" }, @@ -12112,8 +14768,9 @@ } }, "node_modules/joi": { - "version": "17.6.0", - "license": "BSD-3-Clause", + "version": "17.6.3", + "resolved": "https://registry.npmjs.org/joi/-/joi-17.6.3.tgz", + "integrity": "sha512-YlQsIaS9MHYekzf1Qe11LjTkNzx9qhYluK3172z38RxYoAUf82XMX1p1DG1H4Wtk2ED/vPdSn9OggqtDu+aTow==", "dependencies": { "@hapi/hoek": "^9.0.0", "@hapi/topo": "^5.0.0", @@ -12123,23 +14780,27 @@ } }, "node_modules/jpeg-js": { - "version": "0.4.2", - "license": "BSD-3-Clause" + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.4.4.tgz", + "integrity": "sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg==" }, "node_modules/js-levenshtein": { "version": "1.1.6", - "license": "MIT", + "resolved": "https://registry.npmjs.org/js-levenshtein/-/js-levenshtein-1.1.6.tgz", + "integrity": "sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==", "engines": { "node": ">=0.10.0" } }, "node_modules/js-tokens": { "version": "4.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, "node_modules/js-yaml": { "version": "4.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dependencies": { "argparse": "^2.0.1" }, @@ -12147,10 +14808,17 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", + "dev": true + }, "node_modules/jsdom": { "version": "16.7.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz", + "integrity": "sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw==", 
"dev": true, - "license": "MIT", "dependencies": { "abab": "^2.0.5", "acorn": "^8.2.4", @@ -12192,9 +14860,54 @@ } } }, + "node_modules/jsdom/node_modules/form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/jsdom/node_modules/parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", + "dev": true + }, + "node_modules/jsdom/node_modules/tough-cookie": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.2.tgz", + "integrity": "sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ==", + "dev": true, + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsdom/node_modules/universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, "node_modules/jsesc": { "version": "2.5.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", "bin": { "jsesc": "bin/jsesc" }, @@ -12204,30 +14917,43 @@ }, "node_modules/json-buffer": { "version": "3.0.0", - "license": "MIT" - }, - "node_modules/json-parse-better-errors": { - "version": "1.0.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", + "integrity": "sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==" }, "node_modules/json-parse-even-better-errors": { "version": "2.3.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" }, "node_modules/json-pointer": { "version": "0.6.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/json-pointer/-/json-pointer-0.6.2.tgz", + "integrity": "sha512-vLWcKbOaXlO+jvRy4qNd+TI1QUPZzfJj1tpJ3vAXDych5XJf93ftpUKe5pKCrzyIIwgBJcOcCVRUfqQP25afBw==", "dependencies": { "foreach": "^2.0.4" } }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "dev": true + }, "node_modules/json-schema-traverse": { "version": "1.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true }, "node_modules/json5": { "version": "2.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", + "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==", "bin": { "json5": "lib/cli.js" }, @@ -12235,9 +14961,16 @@ "node": ">=6" } }, + "node_modules/jsonc-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", + "dev": true + }, "node_modules/jsonfile": { "version": "6.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", "dependencies": { "universalify": "^2.0.0" }, @@ -12245,37 +14978,80 @@ "graceful-fs": "^4.1.6" } }, + "node_modules/jsprim": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", + "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", + "dev": true, + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + } + }, + "node_modules/katex": { + "version": "0.13.24", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.13.24.tgz", + "integrity": "sha512-jZxYuKCma3VS5UuxOx/rFV1QyGSl3Uy/i0kTJF3HgQ5xMinCQVF8Zd4bMY/9aI9b9A2pjIBOsjSSm68ykTAr8w==", + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], + "dependencies": { + "commander": "^8.0.0" + }, + "bin": { + "katex": "cli.js" + } + }, + "node_modules/katex/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "engines": { + "node": ">= 12" + } + }, "node_modules/keyv": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz", + "integrity": "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==", "dependencies": { "json-buffer": "3.0.0" } }, "node_modules/kind-of": { "version": "6.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", "engines": { "node": ">=0.10.0" } }, "node_modules/kleur": { "version": "3.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", "engines": { "node": ">=6" } }, "node_modules/klona": { "version": "2.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/klona/-/klona-2.0.5.tgz", + "integrity": "sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ==", "engines": { "node": ">= 8" } }, "node_modules/latest-version": { "version": "5.1.0", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz", + "integrity": "sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==", "dependencies": { "package-json": "^6.3.0" }, @@ -12283,17 +15059,28 @@ "node": ">=8" } }, + "node_modules/lazy-ass": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", + "integrity": "sha512-cc8oEVoctTvsFZ/Oje/kGnHbpWHYBe8IAJe4C0QNc3t8uM/0Y8+erSz/7Y1ALuXTEZTMvxXwO6YbX1ey3ujiZw==", + "dev": true, + "engines": { + "node": "> 0.8" + } + }, "node_modules/leven": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", "engines": { "node": ">=6" } }, "node_modules/levn": { "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", "dev": true, - "license": "MIT", "dependencies": { "prelude-ls": "~1.1.2", "type-check": "~0.3.2" @@ -12303,19 +15090,119 @@ } }, "node_modules/lilconfig": { - "version": "2.0.5", - "license": "MIT", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.6.tgz", + "integrity": "sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg==", "engines": { "node": ">=10" } }, "node_modules/lines-and-columns": { "version": "1.2.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" + }, + "node_modules/listr2": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz", + "integrity": "sha512-TyWI8G99GX9GjE54cJ+RrNMcIFBfwMPxc3XTFiAYGN4s10hWROGtOg7+O6u6LE3mNkyld7RSLE6nrKBvTfcs3g==", + "dev": true, + "dependencies": { + "cli-truncate": "^2.1.0", + "colorette": "^2.0.16", + "log-update": "^4.0.0", + "p-map": "^4.0.0", + "rfdc": "^1.3.0", + "rxjs": "^7.5.1", + "through": "^2.3.8", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "enquirer": ">= 2.3.0 < 3" + }, + "peerDependenciesMeta": { + "enquirer": { + "optional": true + } + } + }, + "node_modules/listr2/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/listr2/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/listr2/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": 
true + }, + "node_modules/listr2/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/listr2/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/listr2/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } }, "node_modules/load-bmfont": { "version": "1.4.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/load-bmfont/-/load-bmfont-1.4.1.tgz", + "integrity": "sha512-8UyQoYmdRDy81Brz6aLAUhfZLwr5zV0L3taTQ4hju7m6biuwiWiJXjPhBJxbUQJA8PrkvJ/7Enqmwk2sM14soA==", "dependencies": { "buffer-equal": "0.0.1", "mime": "^1.3.4", @@ -12329,116 +15216,269 @@ }, "node_modules/loader-runner": { "version": "4.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz", + "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==", + "engines": { + "node": ">=6.11.5" + } + }, + "node_modules/loader-utils": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.2.tgz", + "integrity": "sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A==", + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + }, + "engines": { + "node": ">=8.9.0" + } + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lodash.curry": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz", + "integrity": "sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA==" + }, + "node_modules/lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==" + }, + "node_modules/lodash.flow": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/lodash.flow/-/lodash.flow-3.5.0.tgz", + "integrity": 
"sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw==" + }, + "node_modules/lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==" + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "dev": true + }, + "node_modules/lodash.uniq": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", + "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==" + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-symbols/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/log-symbols/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/log-symbols/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/log-symbols/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/log-symbols/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/log-update": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", + "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", + "dev": true, + "dependencies": { + "ansi-escapes": "^4.3.0", + "cli-cursor": "^3.1.0", + "slice-ansi": "^4.0.0", + "wrap-ansi": "^6.2.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-update/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/log-update/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/log-update/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/log-update/node_modules/slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, "engines": { - "node": ">=6.11.5" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" } }, - "node_modules/loader-utils": { - "version": "2.0.2", - "license": "MIT", + "node_modules/log-update/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^2.1.2" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" }, "engines": { - "node": ">=8.9.0" + "node": ">=8" } }, - "node_modules/locate-path": { - "version": "5.0.0", - "license": "MIT", + 
"node_modules/log-update/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, "dependencies": { - "p-locate": "^4.1.0" + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" }, "engines": { "node": ">=8" } }, - "node_modules/lodash": { - "version": "4.17.21", - "license": "MIT" - }, - "node_modules/lodash.assignin": { - "version": "4.2.0", - "license": "MIT" - }, - "node_modules/lodash.bind": { - "version": "4.2.1", - "license": "MIT" - }, - "node_modules/lodash.curry": { - "version": "4.1.1", - "license": "MIT" - }, - "node_modules/lodash.debounce": { - "version": "4.0.8", - "license": "MIT" - }, - "node_modules/lodash.defaults": { - "version": "4.2.0", - "license": "MIT" - }, - "node_modules/lodash.filter": { - "version": "4.6.0", - "license": "MIT" - }, - "node_modules/lodash.flatten": { - "version": "4.4.0", - "license": "MIT" - }, - "node_modules/lodash.flow": { - "version": "3.5.0", - "license": "MIT" - }, - "node_modules/lodash.foreach": { - "version": "4.5.0", - "license": "MIT" - }, - "node_modules/lodash.isequal": { - "version": "4.5.0", - "license": "MIT" - }, - "node_modules/lodash.map": { - "version": "4.6.0", - "license": "MIT" - }, - "node_modules/lodash.memoize": { - "version": "4.1.2", - "license": "MIT" - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "license": "MIT" - }, - "node_modules/lodash.pick": { - "version": "4.4.0", - "license": "MIT" - }, - "node_modules/lodash.reduce": { - "version": "4.6.0", - "license": "MIT" - }, - "node_modules/lodash.reject": { - "version": "4.6.0", - "license": "MIT" - }, - "node_modules/lodash.some": { - "version": "4.6.0", - "license": "MIT" - }, - "node_modules/lodash.sortby": { - "version": "4.7.0", - "license": "MIT" - }, - "node_modules/lodash.uniq": { - "version": "4.5.0", - "license": "MIT" - }, "node_modules/loose-envify": { "version": "1.4.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, @@ -12448,21 +15488,24 @@ }, "node_modules/lower-case": { "version": "2.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", "dependencies": { "tslib": "^2.0.3" } }, "node_modules/lowercase-keys": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", + "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", "engines": { "node": ">=0.10.0" } }, "node_modules/lru-cache": { "version": "6.0.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dependencies": { "yallist": "^4.0.0" }, @@ -12472,19 +15515,22 @@ }, "node_modules/lunr": { "version": "2.3.9", - "license": "MIT" + "resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz", + "integrity": "sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==" 
}, "node_modules/lz-string": { "version": "1.4.4", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.4.4.tgz", + "integrity": "sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ==", "dev": true, - "license": "WTFPL", "bin": { "lz-string": "bin/bin.js" } }, "node_modules/make-dir": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "dependencies": { "semver": "^6.0.0" }, @@ -12497,34 +15543,39 @@ }, "node_modules/make-dir/node_modules/semver": { "version": "6.3.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/makeerror": { "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { "tmpl": "1.0.5" } }, "node_modules/mark.js": { "version": "8.11.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/mark.js/-/mark.js-8.11.1.tgz", + "integrity": "sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==" }, "node_modules/markdown-escapes": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz", + "integrity": "sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/marked": { - "version": "4.0.15", - "license": "MIT", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/marked/-/marked-4.1.1.tgz", + "integrity": "sha512-0cNMnTcUJPxbA6uWmCmjWz4NJRe/0Xfk2NhXCUHjew9qJzFN20krFnsUe7QynwqOwa5m1fZ4UDg0ycKFVC0ccw==", "bin": { "marked": "bin/marked.js" }, @@ -12534,7 +15585,8 @@ }, "node_modules/md5.js": { "version": "1.3.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", + "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", "dependencies": { "hash-base": "^3.0.0", "inherits": "^2.0.1", @@ -12543,7 +15595,8 @@ }, "node_modules/mdast-squeeze-paragraphs": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mdast-squeeze-paragraphs/-/mdast-squeeze-paragraphs-4.0.0.tgz", + "integrity": "sha512-zxdPn69hkQ1rm4J+2Cs2j6wDEv7O17TfXTJ33tl/+JPIoEmtV9t2ZzBM5LPHE8QlHsmVD8t3vPKCyY3oH+H8MQ==", "dependencies": { "unist-util-remove": "^2.0.0" }, @@ -12554,7 +15607,8 @@ }, "node_modules/mdast-util-definitions": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz", + "integrity": "sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==", "dependencies": { "unist-util-visit": "^2.0.0" }, @@ -12565,7 +15619,8 @@ }, "node_modules/mdast-util-to-hast": { "version": "10.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz", + "integrity": 
"sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA==", "dependencies": { "@types/mdast": "^3.0.0", "@types/unist": "^2.0.0", @@ -12583,32 +15638,37 @@ }, "node_modules/mdast-util-to-string": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", + "integrity": "sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/mdn-data": { - "version": "2.0.4", - "license": "CC0-1.0" + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", + "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" }, "node_modules/mdurl": { "version": "1.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" }, "node_modules/media-typer": { "version": "0.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", "engines": { "node": ">= 0.6" } }, "node_modules/memfs": { - "version": "3.4.1", - "license": "Unlicense", + "version": "3.4.7", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.4.7.tgz", + "integrity": "sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw==", "dependencies": { - "fs-monkey": "1.0.3" + "fs-monkey": "^1.0.3" }, "engines": { "node": ">= 4.0.0" @@ -12616,29 +15676,34 @@ }, "node_modules/merge-descriptors": { "version": "1.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" }, "node_modules/merge-stream": { "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" }, "node_modules/merge2": { "version": "1.4.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", "engines": { "node": ">= 8" } }, "node_modules/methods": { "version": "1.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", "engines": { "node": ">= 0.6" } }, "node_modules/micromatch": { "version": "4.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", "dependencies": { "braces": "^3.0.2", "picomatch": "^2.3.1" @@ -12649,7 +15714,8 @@ }, "node_modules/miller-rabin": { "version": "4.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", + "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", 
"dependencies": { "bn.js": "^4.0.0", "brorand": "^1.0.1" @@ -12660,11 +15726,13 @@ }, "node_modules/miller-rabin/node_modules/bn.js": { "version": "4.12.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, "node_modules/mime": { "version": "1.6.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", "bin": { "mime": "cli.js" }, @@ -12674,14 +15742,16 @@ }, "node_modules/mime-db": { "version": "1.52.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { "version": "2.1.35", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "dependencies": { "mime-db": "1.52.0" }, @@ -12691,14 +15761,16 @@ }, "node_modules/mimic-fn": { "version": "2.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", "engines": { "node": ">=6" } }, "node_modules/mimic-response": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", "engines": { "node": ">=10" }, @@ -12708,33 +15780,25 @@ }, "node_modules/min-document": { "version": "2.19.0", + "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", + "integrity": "sha512-9Wy1B3m3f66bPPmU5hdA4DR4PB2OfDU/+GS3yAB7IQozE3tqXaVv2zOjgla7MEGSRv95+ILmOuvhLkOK6wJtCQ==", "dependencies": { "dom-walk": "^0.1.0" } }, "node_modules/min-indent": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", "dev": true, - "license": "MIT", "engines": { "node": ">=4" } }, - "node_modules/mini-create-react-context": { - "version": "0.4.1", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.12.1", - "tiny-warning": "^1.0.3" - }, - "peerDependencies": { - "prop-types": "^15.0.0", - "react": "^0.14.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" - } - }, "node_modules/mini-css-extract-plugin": { - "version": "2.6.0", - "license": "MIT", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz", + "integrity": "sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg==", "dependencies": { "schema-utils": "^4.0.0" }, @@ -12751,7 +15815,8 @@ }, "node_modules/mini-css-extract-plugin/node_modules/ajv": { "version": "8.11.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ 
-12765,7 +15830,8 @@ }, "node_modules/mini-css-extract-plugin/node_modules/ajv-keywords": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "dependencies": { "fast-deep-equal": "^3.1.3" }, @@ -12775,7 +15841,8 @@ }, "node_modules/mini-css-extract-plugin/node_modules/schema-utils": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", @@ -12792,15 +15859,18 @@ }, "node_modules/minimalistic-assert": { "version": "1.0.1", - "license": "ISC" + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" }, "node_modules/minimalistic-crypto-utils": { "version": "1.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", + "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==" }, "node_modules/minimatch": { "version": "3.1.2", - "license": "ISC", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -12809,12 +15879,17 @@ } }, "node_modules/minimist": { - "version": "1.2.6", - "license": "MIT" + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz", + "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/mkdirp": { "version": "0.5.6", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", "dependencies": { "minimist": "^1.2.6" }, @@ -12824,21 +15899,24 @@ }, "node_modules/mkdirp-classic": { "version": "0.5.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" }, "node_modules/mobx": { - "version": "6.5.0", - "license": "MIT", + "version": "6.6.2", + "resolved": "https://registry.npmjs.org/mobx/-/mobx-6.6.2.tgz", + "integrity": "sha512-IOpS0bf3+hXIhDIy+CmlNMBfFpAbHS0aVHcNC+xH/TFYEKIIVDKNYRh9eKlXuVfJ1iRKAp0cRVmO145CyJAMVQ==", "funding": { "type": "opencollective", "url": "https://opencollective.com/mobx" } }, "node_modules/mobx-react": { - "version": "7.3.0", - "license": "MIT", + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/mobx-react/-/mobx-react-7.5.3.tgz", + "integrity": "sha512-+ltotliKt4Bjn3d8taZH/VFAcRUbaASvsM8/QSvmHXcZ++RZwaFtjl9JkIosy1byaJGEDS3EFFx2InRm2VaSUw==", "dependencies": { - "mobx-react-lite": "^3.3.0" + "mobx-react-lite": "^3.4.0" }, "funding": { "type": "opencollective", @@ -12846,7 +15924,7 @@ }, "peerDependencies": { "mobx": "^6.1.0", - "react": "^16.8.0 || ^17" + "react": "^16.8.0 || 
^17 || ^18" }, "peerDependenciesMeta": { "react-dom": { @@ -12858,15 +15936,16 @@ } }, "node_modules/mobx-react-lite": { - "version": "3.3.0", - "license": "MIT", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/mobx-react-lite/-/mobx-react-lite-3.4.0.tgz", + "integrity": "sha512-bRuZp3C0itgLKHu/VNxi66DN/XVkQG7xtoBVWxpvC5FhAqbOCP21+nPhULjnzEqd7xBMybp6KwytdUpZKEgpIQ==", "funding": { "type": "opencollective", "url": "https://opencollective.com/mobx" }, "peerDependencies": { "mobx": "^6.1.0", - "react": "^16.8.0 || ^17" + "react": "^16.8.0 || ^17 || ^18" }, "peerDependenciesMeta": { "react-dom": { @@ -12878,19 +15957,22 @@ } }, "node_modules/mrmime": { - "version": "1.0.0", - "license": "MIT", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz", + "integrity": "sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw==", "engines": { "node": ">=10" } }, "node_modules/ms": { "version": "2.1.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/multicast-dns": { - "version": "7.2.4", - "license": "MIT", + "version": "7.2.5", + "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz", + "integrity": "sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==", "dependencies": { "dns-packet": "^5.2.2", "thunky": "^1.0.2" @@ -12900,8 +15982,9 @@ } }, "node_modules/nanoid": { - "version": "3.3.3", - "license": "MIT", + "version": "3.3.4", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", + "integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==", "bin": { "nanoid": "bin/nanoid.cjs" }, @@ -12911,35 +15994,41 @@ }, "node_modules/napi-build-utils": { "version": "1.0.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz", + "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==" }, "node_modules/natural-compare": { "version": "1.4.0", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true }, "node_modules/negotiator": { "version": "0.6.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", "engines": { "node": ">= 0.6" } }, "node_modules/neo-async": { "version": "2.6.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" }, "node_modules/no-case": { "version": "3.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", "dependencies": { "lower-case": "^2.0.2", "tslib": "^2.0.3" } }, "node_modules/node-abi": { - "version": "3.15.0", - "license": "MIT", + "version": "3.26.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.26.0.tgz", + 
"integrity": "sha512-jRVtMFTChbi2i/jqo/i2iP9634KMe+7K1v35mIdj3Mn59i5q27ZYhn+sW6npISM/PQg7HrP2kwtRBMmh5Uvzdg==", "dependencies": { "semver": "^7.3.5" }, @@ -12948,19 +16037,22 @@ } }, "node_modules/node-addon-api": { - "version": "4.3.0", - "license": "MIT" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.0.0.tgz", + "integrity": "sha512-CvkDw2OEnme7ybCykJpVcKH+uAOLV2qLqiyla128dN9TkEWfrYmxG6C2boDe5KcNQqZF3orkqzGgOMvZ/JNekA==" }, "node_modules/node-emoji": { "version": "1.11.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.11.0.tgz", + "integrity": "sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A==", "dependencies": { "lodash": "^4.17.21" } }, "node_modules/node-fetch": { "version": "2.6.7", - "license": "MIT", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", "dependencies": { "whatwg-url": "^5.0.0" }, @@ -12978,7 +16070,8 @@ }, "node_modules/node-fetch-h2": { "version": "2.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/node-fetch-h2/-/node-fetch-h2-2.3.0.tgz", + "integrity": "sha512-ofRW94Ab0T4AOh5Fk8t0h8OBWrmjb0SSB20xh1H8YnPV9EJ+f5AMoYSUQ2zgJ4Iq2HAK0I2l5/Nequ8YzFS3Hg==", "dependencies": { "http2-client": "^1.2.5" }, @@ -12988,15 +16081,18 @@ }, "node_modules/node-fetch/node_modules/tr46": { "version": "0.0.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, "node_modules/node-fetch/node_modules/webidl-conversions": { "version": "3.0.1", - "license": "BSD-2-Clause" + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, "node_modules/node-fetch/node_modules/whatwg-url": { "version": "5.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" @@ -13004,19 +16100,22 @@ }, "node_modules/node-forge": { "version": "1.3.1", - "license": "(BSD-3-Clause OR GPL-2.0)", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", "engines": { "node": ">= 6.13.0" } }, "node_modules/node-int64": { "version": "0.4.0", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true }, "node_modules/node-polyfill-webpack-plugin": { "version": "1.1.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/node-polyfill-webpack-plugin/-/node-polyfill-webpack-plugin-1.1.4.tgz", + "integrity": "sha512-Z0XTKj1wRWO8o/Vjobsw5iOJCN+Sua3EZEUc2Ziy9CyVvmHKu6o+t4gUH9GOE0czyPR94LI6ZCV/PpcM8b5yow==", "dependencies": { "assert": "^2.0.0", "browserify-zlib": "^0.2.0", @@ -13050,20 +16149,46 @@ "webpack": ">=5" } }, + "node_modules/node-polyfill-webpack-plugin/node_modules/buffer": { + 
"version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, "node_modules/node-readfiles": { "version": "0.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/node-readfiles/-/node-readfiles-0.2.0.tgz", + "integrity": "sha512-SU00ZarexNlE4Rjdm83vglt5Y9yiQ+XI1XpflWlb7q7UTN1JUItm69xMeiQCTxtTfnzt+83T8Cx+vI2ED++VDA==", "dependencies": { "es6-promise": "^3.2.1" } }, "node_modules/node-releases": { - "version": "2.0.4", - "license": "MIT" + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz", + "integrity": "sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==" }, "node_modules/node-vibrant": { "version": "3.1.6", - "license": "MIT", + "resolved": "https://registry.npmjs.org/node-vibrant/-/node-vibrant-3.1.6.tgz", + "integrity": "sha512-Wlc/hQmBMOu6xon12ZJHS2N3M+I6J8DhrD3Yo6m5175v8sFkVIN+UjhKVRcO+fqvre89ASTpmiFEP3nPO13SwA==", "dependencies": { "@jimp/custom": "^0.16.1", "@jimp/plugin-resize": "^0.16.1", @@ -13076,25 +16201,29 @@ }, "node_modules/node-vibrant/node_modules/@types/node": { "version": "10.17.60", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz", + "integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==" }, "node_modules/normalize-path": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", "engines": { "node": ">=0.10.0" } }, "node_modules/normalize-range": { "version": "0.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", "engines": { "node": ">=0.10.0" } }, "node_modules/normalize-url": { "version": "6.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", + "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", "engines": { "node": ">=10" }, @@ -13104,7 +16233,8 @@ }, "node_modules/npm-run-path": { "version": "4.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "dependencies": { "path-key": "^3.0.0" }, @@ -13112,23 +16242,15 @@ "node": ">=8" } }, - "node_modules/npmlog": { - "version": "4.1.2", - "license": "ISC", - "dependencies": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, "node_modules/nprogress": { "version": "0.2.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz", + "integrity": 
"sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA==" }, "node_modules/nth-check": { - "version": "2.0.1", - "license": "BSD-2-Clause", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", "dependencies": { "boolbase": "^1.0.0" }, @@ -13136,28 +16258,24 @@ "url": "https://github.com/fb55/nth-check?sponsor=1" } }, - "node_modules/number-is-nan": { - "version": "1.0.1", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/nwsapi": { - "version": "2.2.0", - "dev": true, - "license": "MIT" + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.2.tgz", + "integrity": "sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw==", + "dev": true }, "node_modules/oas-kit-common": { "version": "1.0.8", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/oas-kit-common/-/oas-kit-common-1.0.8.tgz", + "integrity": "sha512-pJTS2+T0oGIwgjGpw7sIRU8RQMcUoKCDWFLdBqKB2BNmGpbBMH2sdqAaOXUg8OzonZHU0L7vfJu1mJFEiYDWOQ==", "dependencies": { "fast-safe-stringify": "^2.0.7" } }, "node_modules/oas-linter": { "version": "3.2.2", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/oas-linter/-/oas-linter-3.2.2.tgz", + "integrity": "sha512-KEGjPDVoU5K6swgo9hJVA/qYGlwfbFx+Kg2QB/kd7rzV5N8N5Mg6PlsoCMohVnQmo+pzJap/F610qTodKzecGQ==", "dependencies": { "@exodus/schemasafe": "^1.0.0-rc.2", "should": "^13.2.1", @@ -13169,7 +16287,8 @@ }, "node_modules/oas-resolver": { "version": "2.5.6", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/oas-resolver/-/oas-resolver-2.5.6.tgz", + "integrity": "sha512-Yx5PWQNZomfEhPPOphFbZKi9W93CocQj18NlD2Pa4GWZzdZpSJvYwoiuurRI7m3SpcChrnO08hkuQDL3FGsVFQ==", "dependencies": { "node-fetch-h2": "^2.3.0", "oas-kit-common": "^1.0.8", @@ -13184,13 +16303,58 @@ "url": "https://github.com/Mermade/oas-kit?sponsor=1" } }, + "node_modules/oas-resolver/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/oas-resolver/node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/oas-resolver/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/oas-resolver/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, "node_modules/oas-resolver/node_modules/emoji-regex": { "version": "8.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/oas-resolver/node_modules/string-width": { "version": "4.2.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -13200,11 +16364,28 @@ "node": ">=8" } }, + "node_modules/oas-resolver/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/oas-resolver/node_modules/yargs": { - "version": "17.4.1", - "license": "MIT", + "version": "17.6.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.0.tgz", + "integrity": "sha512-8H/wTDqlSwoSnScvV2N/JHfLWOKuh5MVla9hqLjK3nsfyy6Y4kDSYSvkU5YCUEPOSnRXfIyx3Sq+B/IWudTo4g==", "dependencies": { - "cliui": "^7.0.2", + "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", @@ -13217,22 +16398,25 @@ } }, "node_modules/oas-resolver/node_modules/yargs-parser": { - "version": "21.0.1", - "license": "ISC", + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", "engines": { "node": ">=12" } }, "node_modules/oas-schema-walker": { "version": "1.1.5", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/oas-schema-walker/-/oas-schema-walker-1.1.5.tgz", + "integrity": "sha512-2yucenq1a9YPmeNExoUa9Qwrt9RFkjqaMAA1X+U7sbb0AqBeTIdMHky9SQQ6iN94bO5NW0W4TRYXerG+BdAvAQ==", "funding": { "url": "https://github.com/Mermade/oas-kit?sponsor=1" } }, "node_modules/oas-validator": { "version": "5.0.8", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/oas-validator/-/oas-validator-5.0.8.tgz", + "integrity": "sha512-cu20/HE5N5HKqVygs3dt94eYJfBi0TsZvPVXDhbXQHiEityDN+RROTleefoKRKKJ9dFAF2JBkDHgvWj0sjKGmw==", "dependencies": { "call-me-maybe": "^1.0.1", "oas-kit-common": "^1.0.8", @@ -13249,21 +16433,24 @@ }, "node_modules/object-assign": { "version": "4.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", "engines": { "node": ">=0.10.0" } }, "node_modules/object-inspect": { - "version": "1.12.0", - "license": "MIT", + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", "funding": { "url": "https://github.com/sponsors/ljharb" } }, 
"node_modules/object-is": { "version": "1.1.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz", + "integrity": "sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==", "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.1.3" @@ -13277,49 +16464,21 @@ }, "node_modules/object-keys": { "version": "1.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", "engines": { "node": ">= 0.4" } }, "node_modules/object.assign": { - "version": "4.1.2", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.0", - "define-properties": "^1.1.3", - "has-symbols": "^1.0.1", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.getownpropertydescriptors": { - "version": "2.1.3", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1" - }, - "engines": { - "node": ">= 0.8" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.values": { - "version": "1.1.5", - "license": "MIT", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", + "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1" + "define-properties": "^1.1.4", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" }, "engines": { "node": ">= 0.4" @@ -13330,15 +16489,18 @@ }, "node_modules/obuf": { "version": "1.1.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", + "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" }, "node_modules/omggif": { "version": "1.0.10", - "license": "MIT" + "resolved": "https://registry.npmjs.org/omggif/-/omggif-1.0.10.tgz", + "integrity": "sha512-LMJTtvgc/nugXj0Vcrrs68Mn2D1r0zf630VNtqtpI1FEO7e+O9FP4gqs9AcnBaSEeoHIPm28u6qgPR0oyEpGSw==" }, "node_modules/on-finished": { "version": "2.4.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", "dependencies": { "ee-first": "1.1.1" }, @@ -13348,21 +16510,24 @@ }, "node_modules/on-headers": { "version": "1.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", "engines": { "node": ">= 0.8" } }, "node_modules/once": { "version": "1.4.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "dependencies": { "wrappy": "1" } }, "node_modules/onetime": { "version": "5.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "dependencies": { "mimic-fn": "^2.1.0" }, @@ -13375,7 
+16540,8 @@ }, "node_modules/open": { "version": "8.4.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.0.tgz", + "integrity": "sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==", "dependencies": { "define-lazy-prop": "^2.0.0", "is-docker": "^2.1.1", @@ -13389,8 +16555,9 @@ } }, "node_modules/openapi-sampler": { - "version": "1.2.3", - "license": "MIT", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/openapi-sampler/-/openapi-sampler-1.3.0.tgz", + "integrity": "sha512-2QfjK1oM9Sv0q82Ae1RrUe3yfFmAyjF548+6eAeb+h/cL1Uj51TW4UezraBEvwEdzoBgfo4AaTLVFGTKj+yYDw==", "dependencies": { "@types/json-schema": "^7.0.7", "json-pointer": "0.6.2" @@ -13398,15 +16565,17 @@ }, "node_modules/opener": { "version": "1.5.2", - "license": "(WTFPL OR MIT)", + "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz", + "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==", "bin": { "opener": "bin/opener-bin.js" } }, "node_modules/optionator": { "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", "dev": true, - "license": "MIT", "dependencies": { "deep-is": "~0.1.3", "fast-levenshtein": "~2.0.6", @@ -13421,18 +16590,27 @@ }, "node_modules/os-browserify": { "version": "0.3.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", + "integrity": "sha512-gjcpUc3clBf9+210TRaDWbf+rZZZEshZ+DlXMRCeAjp0xhTrnQsKHypIy1J3d5hKdUzj69t708EHtU8P6bUn0A==" + }, + "node_modules/ospath": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", + "integrity": "sha512-o6E5qJV5zkAbIDNhGSIlyOhScKXgQrSRMilfph0clDfM0nEnBOlKlH4sWDmG95BW/CvwNz0vmm7dJVtU2KlMiA==", + "dev": true }, "node_modules/p-cancelable": { "version": "1.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz", + "integrity": "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==", "engines": { "node": ">=6" } }, "node_modules/p-limit": { "version": "2.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dependencies": { "p-try": "^2.0.0" }, @@ -13445,7 +16623,8 @@ }, "node_modules/p-locate": { "version": "4.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dependencies": { "p-limit": "^2.2.0" }, @@ -13455,7 +16634,8 @@ }, "node_modules/p-map": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", "dependencies": { "aggregate-error": "^3.0.0" }, @@ -13468,7 +16648,8 @@ }, "node_modules/p-retry": { "version": "4.6.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz", + "integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==", "dependencies": { "@types/retry": "0.12.0", "retry": "^0.13.1" @@ -13479,14 
+16660,16 @@ }, "node_modules/p-try": { "version": "2.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "engines": { "node": ">=6" } }, "node_modules/package-json": { "version": "6.5.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz", + "integrity": "sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==", "dependencies": { "got": "^9.6.0", "registry-auth-token": "^4.0.0", @@ -13499,18 +16682,21 @@ }, "node_modules/package-json/node_modules/semver": { "version": "6.3.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/pako": { "version": "1.0.11", - "license": "(MIT AND Zlib)" + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==" }, "node_modules/param-case": { "version": "3.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", + "integrity": "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==", "dependencies": { "dot-case": "^3.0.4", "tslib": "^2.0.3" @@ -13518,7 +16704,8 @@ }, "node_modules/parent-module": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "dependencies": { "callsites": "^3.0.0" }, @@ -13528,7 +16715,8 @@ }, "node_modules/parse-asn1": { "version": "5.1.6", - "license": "ISC", + "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz", + "integrity": "sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==", "dependencies": { "asn1.js": "^5.2.0", "browserify-aes": "^1.0.0", @@ -13539,15 +16727,18 @@ }, "node_modules/parse-bmfont-ascii": { "version": "1.0.6", - "license": "MIT" + "resolved": "https://registry.npmjs.org/parse-bmfont-ascii/-/parse-bmfont-ascii-1.0.6.tgz", + "integrity": "sha512-U4RrVsUFCleIOBsIGYOMKjn9PavsGOXxbvYGtMOEfnId0SVNsgehXh1DxUdVPLoxd5mvcEtvmKs2Mmf0Mpa1ZA==" }, "node_modules/parse-bmfont-binary": { "version": "1.0.6", - "license": "MIT" + "resolved": "https://registry.npmjs.org/parse-bmfont-binary/-/parse-bmfont-binary-1.0.6.tgz", + "integrity": "sha512-GxmsRea0wdGdYthjuUeWTMWPqm2+FAd4GI8vCvhgJsFnoGhTrLhXDDupwTo7rXVAgaLIGoVHDZS9p/5XbSqeWA==" }, "node_modules/parse-bmfont-xml": { "version": "1.1.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/parse-bmfont-xml/-/parse-bmfont-xml-1.1.4.tgz", + "integrity": "sha512-bjnliEOmGv3y1aMEfREMBJ9tfL3WR0i0CKPj61DnSLaoxWR3nLrsQrEbCId/8rF4NyRF0cCqisSVXyQYWM+mCQ==", "dependencies": { "xml-parse-from-string": "^1.0.0", "xml2js": "^0.4.5" @@ -13555,7 +16746,8 @@ }, "node_modules/parse-entities": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", + "integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", "dependencies": { "character-entities": "^1.0.0", 
"character-entities-legacy": "^1.0.0", @@ -13571,11 +16763,13 @@ }, "node_modules/parse-headers": { "version": "2.0.5", - "license": "MIT" + "resolved": "https://registry.npmjs.org/parse-headers/-/parse-headers-2.0.5.tgz", + "integrity": "sha512-ft3iAoLOB/MlwbNXgzy43SWGP6sQki2jQvAyBg/zDFAgr9bfNWZIUj42Kw2eJIl8kEi4PbgE6U1Zau/HwI75HA==" }, "node_modules/parse-json": { "version": "5.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", "dependencies": { "@babel/code-frame": "^7.0.0", "error-ex": "^1.3.1", @@ -13591,29 +16785,44 @@ }, "node_modules/parse-numeric-range": { "version": "1.3.0", - "license": "ISC" + "resolved": "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz", + "integrity": "sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ==" }, "node_modules/parse5": { - "version": "6.0.1", - "license": "MIT" + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.1.tgz", + "integrity": "sha512-kwpuwzB+px5WUg9pyK0IcK/shltJN5/OVhQagxhCQNtT9Y9QRZqNY2e1cmbu/paRh5LMnz/oVTVLBpjFmMZhSg==", + "dependencies": { + "entities": "^4.4.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } }, "node_modules/parse5-htmlparser2-tree-adapter": { - "version": "6.0.1", - "license": "MIT", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz", + "integrity": "sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==", "dependencies": { - "parse5": "^6.0.1" + "domhandler": "^5.0.2", + "parse5": "^7.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" } }, "node_modules/parseurl": { "version": "1.3.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", "engines": { "node": ">= 0.8" } }, "node_modules/pascal-case": { "version": "3.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", + "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" @@ -13621,54 +16830,63 @@ }, "node_modules/path-browserify": { "version": "1.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==" }, "node_modules/path-exists": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "engines": { "node": ">=8" } }, "node_modules/path-is-absolute": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", "engines": { "node": ">=0.10.0" } }, "node_modules/path-is-inside": { "version": "1.0.2", - "license": "(WTFPL OR MIT)" + "resolved": 
"https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", + "integrity": "sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==" }, "node_modules/path-key": { "version": "3.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "engines": { "node": ">=8" } }, "node_modules/path-parse": { "version": "1.0.7", - "license": "MIT" + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" }, "node_modules/path-to-regexp": { "version": "1.8.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", + "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", "dependencies": { "isarray": "0.0.1" } }, "node_modules/path-type": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", "engines": { "node": ">=8" } }, "node_modules/pbkdf2": { "version": "3.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", + "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", "dependencies": { "create-hash": "^1.1.2", "create-hmac": "^1.1.4", @@ -13680,21 +16898,37 @@ "node": ">=0.12" } }, + "node_modules/pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", + "dev": true + }, "node_modules/perfect-scrollbar": { "version": "1.5.5", - "license": "MIT" + "resolved": "https://registry.npmjs.org/perfect-scrollbar/-/perfect-scrollbar-1.5.5.tgz", + "integrity": "sha512-dzalfutyP3e/FOpdlhVryN4AJ5XDVauVWxybSkLZmakFE2sS3y3pc4JnSprw8tGmHvkaG5Edr5T7LBTZ+WWU2g==" + }, + "node_modules/performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==", + "dev": true }, "node_modules/phin": { "version": "2.9.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/phin/-/phin-2.9.3.tgz", + "integrity": "sha512-CzFr90qM24ju5f88quFC/6qohjC144rehe5n6DH900lgXmUe86+xCKc10ev56gRKC4/BkHUoG4uSiQgBiIXwDA==" }, "node_modules/picocolors": { "version": "1.0.0", - "license": "ISC" + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" }, "node_modules/picomatch": { "version": "2.3.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "engines": { "node": ">=8.6" }, @@ -13702,17 +16936,28 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": 
"sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/pirates": { "version": "4.0.5", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", + "integrity": "sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==", "dev": true, - "license": "MIT", "engines": { "node": ">= 6" } }, "node_modules/pixelmatch": { "version": "4.0.2", - "license": "ISC", + "resolved": "https://registry.npmjs.org/pixelmatch/-/pixelmatch-4.0.2.tgz", + "integrity": "sha512-J8B6xqiO37sU/gkcMglv6h5Jbd9xNER7aHzpfRdNmV4IbQBzBpe4l9XmbG+xPF/znacgu2jfEw+wHffaq/YkXA==", "dependencies": { "pngjs": "^3.0.0" }, @@ -13722,7 +16967,8 @@ }, "node_modules/pkg-dir": { "version": "4.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "dependencies": { "find-up": "^4.0.0" }, @@ -13732,7 +16978,8 @@ }, "node_modules/pkg-up": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz", + "integrity": "sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==", "dependencies": { "find-up": "^3.0.0" }, @@ -13742,7 +16989,8 @@ }, "node_modules/pkg-up/node_modules/find-up": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "dependencies": { "locate-path": "^3.0.0" }, @@ -13752,7 +17000,8 @@ }, "node_modules/pkg-up/node_modules/locate-path": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "dependencies": { "p-locate": "^3.0.0", "path-exists": "^3.0.0" @@ -13763,7 +17012,8 @@ }, "node_modules/pkg-up/node_modules/p-locate": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", "dependencies": { "p-limit": "^2.0.0" }, @@ -13773,28 +17023,32 @@ }, "node_modules/pkg-up/node_modules/path-exists": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", "engines": { "node": ">=4" } }, "node_modules/pluralize": { "version": "8.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz", + "integrity": "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==", "engines": { "node": ">=4" } }, "node_modules/pngjs": { "version": "3.4.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-3.4.0.tgz", + "integrity": "sha512-NCrCHhWmnQklfH4MtJMRjZ2a8c80qXeMlQMv2uVp9ISJMTt562SbGd6n2oq0PaPgKm7Z6pL9E2UlLIhC+SHL3w==", "engines": { "node": ">=4.0.0" } }, "node_modules/polished": { "version": "4.2.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/polished/-/polished-4.2.2.tgz", + "integrity": 
"sha512-Sz2Lkdxz6F2Pgnpi9U5Ng/WdWAUZxmHrNPoVlm3aAemxoy2Qy7LGjQg4uf8qKelDAUW94F4np3iH2YPf2qefcQ==", "dependencies": { "@babel/runtime": "^7.17.8" }, @@ -13802,27 +17056,10 @@ "node": ">=10" } }, - "node_modules/portfinder": { - "version": "1.0.28", - "license": "MIT", - "dependencies": { - "async": "^2.6.2", - "debug": "^3.1.1", - "mkdirp": "^0.5.5" - }, - "engines": { - "node": ">= 0.12.0" - } - }, - "node_modules/portfinder/node_modules/debug": { - "version": "3.2.7", - "license": "MIT", - "dependencies": { - "ms": "^2.1.1" - } - }, "node_modules/postcss": { - "version": "8.4.13", + "version": "8.4.18", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.18.tgz", + "integrity": "sha512-Wi8mWhncLJm11GATDaQKobXSNEYGUHeQLiQqDFG1qQ5UTDPTEvKw0Xt5NsTpktGTwLps3ByrWsBrG0rB8YQ9oA==", "funding": [ { "type": "opencollective", @@ -13833,9 +17070,8 @@ "url": "https://tidelift.com/funding/github/npm/postcss" } ], - "license": "MIT", "dependencies": { - "nanoid": "^3.3.3", + "nanoid": "^3.3.4", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" }, @@ -13845,7 +17081,8 @@ }, "node_modules/postcss-calc": { "version": "8.2.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.2.4.tgz", + "integrity": "sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q==", "dependencies": { "postcss-selector-parser": "^6.0.9", "postcss-value-parser": "^4.2.0" @@ -13856,7 +17093,8 @@ }, "node_modules/postcss-colormin": { "version": "5.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.3.0.tgz", + "integrity": "sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg==", "dependencies": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0", @@ -13871,9 +17109,11 @@ } }, "node_modules/postcss-convert-values": { - "version": "5.1.0", - "license": "MIT", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz", + "integrity": "sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g==", "dependencies": { + "browserslist": "^4.20.3", "postcss-value-parser": "^4.2.0" }, "engines": { @@ -13884,8 +17124,9 @@ } }, "node_modules/postcss-discard-comments": { - "version": "5.1.1", - "license": "MIT", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz", + "integrity": "sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ==", "engines": { "node": "^10 || ^12 || >=14.0" }, @@ -13895,7 +17136,8 @@ }, "node_modules/postcss-discard-duplicates": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz", + "integrity": "sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw==", "engines": { "node": "^10 || ^12 || >=14.0" }, @@ -13905,7 +17147,8 @@ }, "node_modules/postcss-discard-empty": { "version": "5.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz", + "integrity": "sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A==", "engines": { "node": "^10 || ^12 || >=14.0" }, @@ -13915,7 +17158,8 @@ }, "node_modules/postcss-discard-overridden": { "version": "5.1.0", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz", + "integrity": "sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw==", "engines": { "node": "^10 || ^12 || >=14.0" }, @@ -13925,7 +17169,8 @@ }, "node_modules/postcss-discard-unused": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-5.1.0.tgz", + "integrity": "sha512-KwLWymI9hbwXmJa0dkrzpRbSJEh0vVUd7r8t0yOGPcfKzyJJxFM8kLyC5Ev9avji6nY95pOp1W6HqIrfT+0VGw==", "dependencies": { "postcss-selector-parser": "^6.0.5" }, @@ -13938,7 +17183,8 @@ }, "node_modules/postcss-loader": { "version": "6.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-loader/-/postcss-loader-6.2.1.tgz", + "integrity": "sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q==", "dependencies": { "cosmiconfig": "^7.0.0", "klona": "^2.0.5", @@ -13958,7 +17204,8 @@ }, "node_modules/postcss-merge-idents": { "version": "5.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-5.1.1.tgz", + "integrity": "sha512-pCijL1TREiCoog5nQp7wUe+TUonA2tC2sQ54UGeMmryK3UFGIYKqDyjnqd6RcuI4znFn9hWSLNN8xKE/vWcUQw==", "dependencies": { "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" @@ -13971,8 +17218,9 @@ } }, "node_modules/postcss-merge-longhand": { - "version": "5.1.4", - "license": "MIT", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz", + "integrity": "sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw==", "dependencies": { "postcss-value-parser": "^4.2.0", "stylehacks": "^5.1.0" @@ -13985,8 +17233,9 @@ } }, "node_modules/postcss-merge-rules": { - "version": "5.1.1", - "license": "MIT", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz", + "integrity": "sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ==", "dependencies": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0", @@ -14002,7 +17251,8 @@ }, "node_modules/postcss-minify-font-values": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz", + "integrity": "sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA==", "dependencies": { "postcss-value-parser": "^4.2.0" }, @@ -14015,7 +17265,8 @@ }, "node_modules/postcss-minify-gradients": { "version": "5.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz", + "integrity": "sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw==", "dependencies": { "colord": "^2.9.1", "cssnano-utils": "^3.1.0", @@ -14029,8 +17280,9 @@ } }, "node_modules/postcss-minify-params": { - "version": "5.1.2", - "license": "MIT", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz", + "integrity": "sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg==", "dependencies": { "browserslist": "^4.16.6", "cssnano-utils": "^3.1.0", @@ -14044,8 +17296,9 @@ } }, "node_modules/postcss-minify-selectors": { - "version": "5.2.0", - "license": "MIT", + 
"version": "5.2.1", + "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz", + "integrity": "sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg==", "dependencies": { "postcss-selector-parser": "^6.0.5" }, @@ -14058,8 +17311,9 @@ }, "node_modules/postcss-modules-extract-imports": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz", + "integrity": "sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ==", "dev": true, - "license": "ISC", "dependencies": { "postcss": "^7.0.5" }, @@ -14069,13 +17323,15 @@ }, "node_modules/postcss-modules-extract-imports/node_modules/picocolors": { "version": "0.2.1", - "dev": true, - "license": "ISC" + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", + "dev": true }, "node_modules/postcss-modules-extract-imports/node_modules/postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, - "license": "MIT", "dependencies": { "picocolors": "^0.2.1", "source-map": "^0.6.1" @@ -14090,8 +17346,9 @@ }, "node_modules/postcss-modules-local-by-default": { "version": "3.0.3", + "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-3.0.3.tgz", + "integrity": "sha512-e3xDq+LotiGesympRlKNgaJ0PCzoUIdpH0dj47iWAui/kyTgh3CiAr1qP54uodmJhl6p9rN6BoNcdEDVJx9RDw==", "dev": true, - "license": "MIT", "dependencies": { "icss-utils": "^4.1.1", "postcss": "^7.0.32", @@ -14104,13 +17361,15 @@ }, "node_modules/postcss-modules-local-by-default/node_modules/picocolors": { "version": "0.2.1", - "dev": true, - "license": "ISC" + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", + "dev": true }, "node_modules/postcss-modules-local-by-default/node_modules/postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, - "license": "MIT", "dependencies": { "picocolors": "^0.2.1", "source-map": "^0.6.1" @@ -14125,8 +17384,9 @@ }, "node_modules/postcss-modules-scope": { "version": "2.2.0", + "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-2.2.0.tgz", + "integrity": "sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ==", "dev": true, - "license": "ISC", "dependencies": { "postcss": "^7.0.6", "postcss-selector-parser": "^6.0.0" @@ -14137,13 +17397,15 @@ }, "node_modules/postcss-modules-scope/node_modules/picocolors": { "version": "0.2.1", - "dev": true, - "license": "ISC" + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", + "dev": true }, "node_modules/postcss-modules-scope/node_modules/postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + 
"integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, - "license": "MIT", "dependencies": { "picocolors": "^0.2.1", "source-map": "^0.6.1" @@ -14158,8 +17420,9 @@ }, "node_modules/postcss-modules-values": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-3.0.0.tgz", + "integrity": "sha512-1//E5jCBrZ9DmRX+zCtmQtRSV6PV42Ix7Bzj9GbwJceduuf7IqP8MgeTXuRDHOWj2m0VzZD5+roFWDuU8RQjcg==", "dev": true, - "license": "ISC", "dependencies": { "icss-utils": "^4.0.0", "postcss": "^7.0.6" @@ -14167,13 +17430,15 @@ }, "node_modules/postcss-modules-values/node_modules/picocolors": { "version": "0.2.1", - "dev": true, - "license": "ISC" + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", + "dev": true }, "node_modules/postcss-modules-values/node_modules/postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, - "license": "MIT", "dependencies": { "picocolors": "^0.2.1", "source-map": "^0.6.1" @@ -14188,7 +17453,8 @@ }, "node_modules/postcss-normalize-charset": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz", + "integrity": "sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg==", "engines": { "node": "^10 || ^12 || >=14.0" }, @@ -14198,7 +17464,8 @@ }, "node_modules/postcss-normalize-display-values": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz", + "integrity": "sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA==", "dependencies": { "postcss-value-parser": "^4.2.0" }, @@ -14210,8 +17477,9 @@ } }, "node_modules/postcss-normalize-positions": { - "version": "5.1.0", - "license": "MIT", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz", + "integrity": "sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg==", "dependencies": { "postcss-value-parser": "^4.2.0" }, @@ -14223,8 +17491,9 @@ } }, "node_modules/postcss-normalize-repeat-style": { - "version": "5.1.0", - "license": "MIT", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz", + "integrity": "sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g==", "dependencies": { "postcss-value-parser": "^4.2.0" }, @@ -14237,7 +17506,8 @@ }, "node_modules/postcss-normalize-string": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz", + "integrity": "sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w==", "dependencies": { "postcss-value-parser": "^4.2.0" }, @@ -14250,7 +17520,8 @@ }, "node_modules/postcss-normalize-timing-functions": { "version": "5.1.0", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz", + "integrity": "sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg==", "dependencies": { "postcss-value-parser": "^4.2.0" }, @@ -14263,7 +17534,8 @@ }, "node_modules/postcss-normalize-unicode": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz", + "integrity": "sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ==", "dependencies": { "browserslist": "^4.16.6", "postcss-value-parser": "^4.2.0" @@ -14277,7 +17549,8 @@ }, "node_modules/postcss-normalize-url": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz", + "integrity": "sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew==", "dependencies": { "normalize-url": "^6.0.1", "postcss-value-parser": "^4.2.0" @@ -14291,7 +17564,8 @@ }, "node_modules/postcss-normalize-whitespace": { "version": "5.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz", + "integrity": "sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA==", "dependencies": { "postcss-value-parser": "^4.2.0" }, @@ -14303,8 +17577,9 @@ } }, "node_modules/postcss-ordered-values": { - "version": "5.1.1", - "license": "MIT", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz", + "integrity": "sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ==", "dependencies": { "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" @@ -14318,7 +17593,8 @@ }, "node_modules/postcss-reduce-idents": { "version": "5.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-5.2.0.tgz", + "integrity": "sha512-BTrLjICoSB6gxbc58D5mdBK8OhXRDqud/zodYfdSi52qvDHdMwk+9kB9xsM8yJThH/sZU5A6QVSmMmaN001gIg==", "dependencies": { "postcss-value-parser": "^4.2.0" }, @@ -14331,7 +17607,8 @@ }, "node_modules/postcss-reduce-initial": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz", + "integrity": "sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw==", "dependencies": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0" @@ -14345,7 +17622,8 @@ }, "node_modules/postcss-reduce-transforms": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz", + "integrity": "sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ==", "dependencies": { "postcss-value-parser": "^4.2.0" }, @@ -14358,7 +17636,8 @@ }, "node_modules/postcss-selector-parser": { "version": "6.0.10", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", + "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" @@ -14368,21 +17647,23 @@ } }, "node_modules/postcss-sort-media-queries": { - "version": 
"4.2.1", - "license": "MIT", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/postcss-sort-media-queries/-/postcss-sort-media-queries-4.3.0.tgz", + "integrity": "sha512-jAl8gJM2DvuIJiI9sL1CuiHtKM4s5aEIomkU8G3LFvbP+p8i7Sz8VV63uieTgoewGqKbi+hxBTiOKJlB35upCg==", "dependencies": { - "sort-css-media-queries": "2.0.4" + "sort-css-media-queries": "2.1.0" }, "engines": { "node": ">=10.0.0" }, "peerDependencies": { - "postcss": "^8.4.4" + "postcss": "^8.4.16" } }, "node_modules/postcss-svgo": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.1.0.tgz", + "integrity": "sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA==", "dependencies": { "postcss-value-parser": "^4.2.0", "svgo": "^2.7.0" @@ -14394,50 +17675,10 @@ "postcss": "^8.2.15" } }, - "node_modules/postcss-svgo/node_modules/commander": { - "version": "7.2.0", - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, - "node_modules/postcss-svgo/node_modules/css-tree": { - "version": "1.1.3", - "license": "MIT", - "dependencies": { - "mdn-data": "2.0.14", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/postcss-svgo/node_modules/mdn-data": { - "version": "2.0.14", - "license": "CC0-1.0" - }, - "node_modules/postcss-svgo/node_modules/svgo": { - "version": "2.8.0", - "license": "MIT", - "dependencies": { - "@trysound/sax": "0.2.0", - "commander": "^7.2.0", - "css-select": "^4.1.3", - "css-tree": "^1.1.3", - "csso": "^4.2.0", - "picocolors": "^1.0.0", - "stable": "^0.1.8" - }, - "bin": { - "svgo": "bin/svgo" - }, - "engines": { - "node": ">=10.13.0" - } - }, "node_modules/postcss-unique-selectors": { "version": "5.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz", + "integrity": "sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA==", "dependencies": { "postcss-selector-parser": "^6.0.5" }, @@ -14450,11 +17691,13 @@ }, "node_modules/postcss-value-parser": { "version": "4.2.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" }, "node_modules/postcss-zindex": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-5.1.0.tgz", + "integrity": "sha512-fgFMf0OtVSBR1va1JNHYgMxYk73yhn/qb4uQDq1DLGYolz8gHCyr/sesEuGUaYs58E3ZJRcpoGuPVoB7Meiq9A==", "engines": { "node": "^10 || ^12 || >=14.0" }, @@ -14463,8 +17706,9 @@ } }, "node_modules/prebuild-install": { - "version": "7.1.0", - "license": "MIT", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz", + "integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==", "dependencies": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", @@ -14473,7 +17717,6 @@ "mkdirp-classic": "^0.5.3", "napi-build-utils": "^1.0.1", "node-abi": "^3.3.0", - "npmlog": "^4.0.1", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^4.0.0", @@ -14489,6 +17732,8 @@ }, "node_modules/prelude-ls": { "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", 
"dev": true, "engines": { "node": ">= 0.8.0" @@ -14496,14 +17741,28 @@ }, "node_modules/prepend-http": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==", "engines": { "node": ">=4" } }, + "node_modules/pretty-bytes": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", + "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/pretty-error": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz", + "integrity": "sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==", "dependencies": { "lodash": "^4.17.20", "renderkid": "^3.0.0" @@ -14511,8 +17770,9 @@ }, "node_modules/pretty-format": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", "dev": true, - "license": "MIT", "dependencies": { "ansi-regex": "^5.0.1", "ansi-styles": "^5.0.0", @@ -14524,8 +17784,9 @@ }, "node_modules/pretty-format/node_modules/ansi-styles": { "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", "dev": true, - "license": "MIT", "engines": { "node": ">=10" }, @@ -14535,51 +17796,59 @@ }, "node_modules/pretty-format/node_modules/react-is": { "version": "17.0.2", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true }, "node_modules/pretty-time": { "version": "1.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/pretty-time/-/pretty-time-1.1.0.tgz", + "integrity": "sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA==", "engines": { "node": ">=4" } }, "node_modules/prism-react-renderer": { - "version": "1.3.1", - "license": "MIT", + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-1.3.5.tgz", + "integrity": "sha512-IJ+MSwBWKG+SM3b2SUfdrhC+gu01QkV2KmRQgREThBfSQRoufqRfxfHUxpG1WcaFjP+kojcFyO9Qqtpgt3qLCg==", "peerDependencies": { "react": ">=0.14.9" } }, "node_modules/prismjs": { - "version": "1.28.0", - "license": "MIT", + "version": "1.29.0", + "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.29.0.tgz", + "integrity": "sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==", "engines": { "node": ">=6" } }, "node_modules/process": { "version": "0.11.10", - "license": "MIT", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", "engines": { "node": ">= 0.6.0" } }, "node_modules/process-nextick-args": { "version": "2.0.1", - "license": "MIT" + "resolved": 
"https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "node_modules/promise": { "version": "7.3.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", + "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", "dependencies": { "asap": "~2.0.3" } }, "node_modules/prompts": { "version": "2.4.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", "dependencies": { "kleur": "^3.0.3", "sisteransi": "^1.0.5" @@ -14590,7 +17859,8 @@ }, "node_modules/prop-types": { "version": "15.8.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", @@ -14599,11 +17869,13 @@ }, "node_modules/prop-types/node_modules/react-is": { "version": "16.13.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/property-information": { "version": "5.6.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz", + "integrity": "sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==", "dependencies": { "xtend": "^4.0.0" }, @@ -14614,7 +17886,8 @@ }, "node_modules/proxy-addr": { "version": "2.0.7", - "license": "MIT", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" @@ -14625,19 +17898,28 @@ }, "node_modules/proxy-addr/node_modules/ipaddr.js": { "version": "1.9.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", "engines": { "node": ">= 0.10" } }, + "node_modules/proxy-from-env": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", + "integrity": "sha512-F2JHgJQ1iqwnHDcQjVBsq3n/uoaFL+iPW/eAeL7kVxy/2RrWaN4WroKjjvbsoRtv0ftelNyC01bjRhn/bhcf4A==", + "dev": true + }, "node_modules/psl": { - "version": "1.8.0", - "dev": true, - "license": "MIT" + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", + "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==", + "dev": true }, "node_modules/public-encrypt": { "version": "4.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", + "integrity": "sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==", "dependencies": { "bn.js": "^4.1.0", "browserify-rsa": "^4.0.0", @@ -14649,11 +17931,13 @@ }, "node_modules/public-encrypt/node_modules/bn.js": { "version": "4.12.0", - "license": "MIT" + "resolved": 
"https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, "node_modules/pump": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -14661,14 +17945,16 @@ }, "node_modules/punycode": { "version": "2.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", "engines": { "node": ">=6" } }, "node_modules/pupa": { "version": "2.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz", + "integrity": "sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==", "dependencies": { "escape-goat": "^2.0.0" }, @@ -14678,51 +17964,53 @@ }, "node_modules/pure-color": { "version": "1.3.0", - "license": "MIT" - }, - "node_modules/q": { - "version": "1.5.1", - "license": "MIT", - "engines": { - "node": ">=0.6.0", - "teleport": ">=0.2.0" - } + "resolved": "https://registry.npmjs.org/pure-color/-/pure-color-1.3.0.tgz", + "integrity": "sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA==" }, "node_modules/qs": { - "version": "6.10.3", - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.0.4" - }, + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", + "dev": true, "engines": { "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" } }, "node_modules/querystring": { "version": "0.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.1.tgz", + "integrity": "sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==", + "deprecated": "The querystring API is considered Legacy. 
new code should use the URLSearchParams API instead.", "engines": { "node": ">=0.4.x" } }, "node_modules/querystring-es3": { "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", + "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==", "engines": { "node": ">=0.4.x" } }, + "node_modules/querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true + }, "node_modules/queue": { "version": "6.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/queue/-/queue-6.0.2.tgz", + "integrity": "sha512-iHZWu+q3IdFZFX36ro/lKBkSvfkztY5Y7HMiPlOUjhupPcG2JMfst2KKEpu5XndviX/3UhFbRngUPNKtgvtZiA==", "dependencies": { "inherits": "~2.0.3" } }, "node_modules/queue-microtask": { "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", "funding": [ { "type": "github", @@ -14736,19 +18024,20 @@ "type": "consulting", "url": "https://feross.org/support" } - ], - "license": "MIT" + ] }, "node_modules/randombytes": { "version": "2.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", "dependencies": { "safe-buffer": "^5.1.0" } }, "node_modules/randomfill": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz", + "integrity": "sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==", "dependencies": { "randombytes": "^2.0.5", "safe-buffer": "^5.1.0" @@ -14756,14 +18045,16 @@ }, "node_modules/range-parser": { "version": "1.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", + "integrity": "sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A==", "engines": { "node": ">= 0.6" } }, "node_modules/raw-body": { "version": "2.5.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", + "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", @@ -14776,14 +18067,16 @@ }, "node_modules/raw-body/node_modules/bytes": { "version": "3.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", "engines": { "node": ">= 0.8" } }, "node_modules/rc": { "version": "1.2.8", - "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", @@ -14794,16 +18087,23 @@ "rc": "cli.js" } }, + "node_modules/rc/node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": 
"sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" + }, "node_modules/rc/node_modules/strip-json-comments": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", "engines": { "node": ">=0.10.0" } }, "node_modules/react": { "version": "17.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz", + "integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==", "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" @@ -14814,7 +18114,8 @@ }, "node_modules/react-base16-styling": { "version": "0.6.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/react-base16-styling/-/react-base16-styling-0.6.0.tgz", + "integrity": "sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ==", "dependencies": { "base16": "^1.0.0", "lodash.curry": "^4.0.1", @@ -14824,7 +18125,8 @@ }, "node_modules/react-dev-utils": { "version": "12.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-12.0.1.tgz", + "integrity": "sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ==", "dependencies": { "@babel/code-frame": "^7.16.0", "address": "^1.1.2", @@ -14857,7 +18159,8 @@ }, "node_modules/react-dev-utils/node_modules/ansi-styles": { "version": "4.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dependencies": { "color-convert": "^2.0.1" }, @@ -14870,7 +18173,8 @@ }, "node_modules/react-dev-utils/node_modules/chalk": { "version": "4.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -14884,7 +18188,8 @@ }, "node_modules/react-dev-utils/node_modules/color-convert": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { "color-name": "~1.1.4" }, @@ -14894,11 +18199,13 @@ }, "node_modules/react-dev-utils/node_modules/color-name": { "version": "1.1.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/react-dev-utils/node_modules/escape-string-regexp": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "engines": { "node": ">=10" }, @@ -14908,7 +18215,8 @@ }, "node_modules/react-dev-utils/node_modules/find-up": { "version": "5.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": 
"sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" @@ -14922,21 +18230,24 @@ }, "node_modules/react-dev-utils/node_modules/has-flag": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "engines": { "node": ">=8" } }, "node_modules/react-dev-utils/node_modules/loader-utils": { "version": "3.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.0.tgz", + "integrity": "sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ==", "engines": { "node": ">= 12.13.0" } }, "node_modules/react-dev-utils/node_modules/locate-path": { "version": "6.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dependencies": { "p-locate": "^5.0.0" }, @@ -14949,7 +18260,8 @@ }, "node_modules/react-dev-utils/node_modules/p-limit": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dependencies": { "yocto-queue": "^0.1.0" }, @@ -14962,7 +18274,8 @@ }, "node_modules/react-dev-utils/node_modules/p-locate": { "version": "5.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dependencies": { "p-limit": "^3.0.2" }, @@ -14975,7 +18288,8 @@ }, "node_modules/react-dev-utils/node_modules/supports-color": { "version": "7.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dependencies": { "has-flag": "^4.0.0" }, @@ -14985,7 +18299,8 @@ }, "node_modules/react-dom": { "version": "17.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz", + "integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==", "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", @@ -14997,15 +18312,18 @@ }, "node_modules/react-error-overlay": { "version": "6.0.11", - "license": "MIT" + "resolved": "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.11.tgz", + "integrity": "sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg==" }, "node_modules/react-fast-compare": { "version": "3.2.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-3.2.0.tgz", + "integrity": "sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA==" }, "node_modules/react-helmet-async": { "version": "1.3.0", - "license": "Apache-2.0", + "resolved": "https://registry.npmjs.org/react-helmet-async/-/react-helmet-async-1.3.0.tgz", + "integrity": "sha512-9jZ57/dAn9t3q6hneQS0wukqC2ENOBgMNVEhb/ZG9ZSxUetzVIw4iAmEU38IaVg3QGYauQPhSeUTuIUtFglWpg==", "dependencies": { "@babel/runtime": 
"^7.12.5", "invariant": "^2.2.4", @@ -15019,12 +18337,14 @@ } }, "node_modules/react-is": { - "version": "18.1.0", - "license": "MIT" + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==" }, "node_modules/react-json-view": { "version": "1.21.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/react-json-view/-/react-json-view-1.21.3.tgz", + "integrity": "sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw==", "dependencies": { "flux": "^4.0.1", "react-base16-styling": "^0.6.0", @@ -15038,12 +18358,14 @@ }, "node_modules/react-lifecycles-compat": { "version": "3.0.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz", + "integrity": "sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==" }, "node_modules/react-loadable": { "name": "@docusaurus/react-loadable", "version": "5.5.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz", + "integrity": "sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==", "dependencies": { "@types/react": "*", "prop-types": "^15.6.2" @@ -15054,7 +18376,8 @@ }, "node_modules/react-loadable-ssr-addon-v5-slorber": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/react-loadable-ssr-addon-v5-slorber/-/react-loadable-ssr-addon-v5-slorber-1.0.1.tgz", + "integrity": "sha512-lq3Lyw1lGku8zUEJPDxsNm1AfYHBrO9Y1+olAYwpUJ2IGFBskM0DMKok97A6LWUpHm+o7IvQBOWu9MLenp9Z+A==", "dependencies": { "@babel/runtime": "^7.10.3" }, @@ -15067,14 +18390,14 @@ } }, "node_modules/react-router": { - "version": "5.3.1", - "license": "MIT", + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.3.4.tgz", + "integrity": "sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA==", "dependencies": { "@babel/runtime": "^7.12.13", "history": "^4.9.0", "hoist-non-react-statics": "^3.1.0", "loose-envify": "^1.3.1", - "mini-create-react-context": "^0.4.0", "path-to-regexp": "^1.7.0", "prop-types": "^15.6.2", "react-is": "^16.6.0", @@ -15087,7 +18410,8 @@ }, "node_modules/react-router-config": { "version": "5.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/react-router-config/-/react-router-config-5.1.1.tgz", + "integrity": "sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg==", "dependencies": { "@babel/runtime": "^7.1.2" }, @@ -15097,14 +18421,15 @@ } }, "node_modules/react-router-dom": { - "version": "5.3.1", - "license": "MIT", + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.3.4.tgz", + "integrity": "sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ==", "dependencies": { "@babel/runtime": "^7.12.13", "history": "^4.9.0", "loose-envify": "^1.3.1", "prop-types": "^15.6.2", - "react-router": "5.3.1", + "react-router": "5.3.4", "tiny-invariant": "^1.0.2", "tiny-warning": "^1.0.0" }, @@ -15114,11 +18439,13 @@ }, "node_modules/react-router/node_modules/react-is": { "version": "16.13.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": 
"sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/react-tabs": { "version": "3.2.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/react-tabs/-/react-tabs-3.2.3.tgz", + "integrity": "sha512-jx325RhRVnS9DdFbeF511z0T0WEqEoMl1uCE3LoZ6VaZZm7ytatxbum0B8bCTmaiV0KsU+4TtLGTGevCic7SWg==", "dependencies": { "clsx": "^1.1.0", "prop-types": "^15.5.0" @@ -15128,25 +18455,27 @@ } }, "node_modules/react-textarea-autosize": { - "version": "8.3.3", - "license": "MIT", + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.3.4.tgz", + "integrity": "sha512-CdtmP8Dc19xL8/R6sWvtknD/eCXkQr30dtvC4VmGInhRsfF8X/ihXCq6+9l9qbxmKRiq407/7z5fxE7cVWQNgQ==", "dependencies": { "@babel/runtime": "^7.10.2", - "use-composed-ref": "^1.0.0", - "use-latest": "^1.0.0" + "use-composed-ref": "^1.3.0", + "use-latest": "^1.2.1" }, "engines": { "node": ">=10" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0" + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, "node_modules/react-tooltip": { - "version": "4.2.21", - "license": "MIT", + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/react-tooltip/-/react-tooltip-4.4.2.tgz", + "integrity": "sha512-VncdQWS6wfTZK/J1xW9PLCfKwmeNcxDGZ6mC8ZE7V3UUyNRw/ZpzcqQZm84WWKptAiBwyKL0PgCRrKJk3N440Q==", "dependencies": { - "prop-types": "^15.7.2", + "prop-types": "^15.8.1", "uuid": "^7.0.3" }, "engines": { @@ -15157,26 +18486,32 @@ "react-dom": ">=16.0.0" } }, + "node_modules/react-tooltip/node_modules/uuid": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz", + "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/react-waypoint": { - "version": "10.1.0", - "license": "MIT", + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/react-waypoint/-/react-waypoint-10.3.0.tgz", + "integrity": "sha512-iF1y2c1BsoXuEGz08NoahaLFIGI9gTUAAOKip96HUmylRT6DUtpgoBPjk/Y8dfcFVmfVDvUzWjNXpZyKTOV0SQ==", "dependencies": { "@babel/runtime": "^7.12.5", "consolidated-events": "^1.1.0 || ^2.0.0", "prop-types": "^15.0.0", - "react-is": "^17.0.1" + "react-is": "^17.0.1 || ^18.0.0" }, "peerDependencies": { - "react": "^15.3.0 || ^16.0.0 || ^17.0.0" + "react": "^15.3.0 || ^16.0.0 || ^17.0.0 || ^18.0.0" } }, - "node_modules/react-waypoint/node_modules/react-is": { - "version": "17.0.2", - "license": "MIT" - }, "node_modules/readable-stream": { "version": "3.6.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -15188,7 +18523,8 @@ }, "node_modules/readdirp": { "version": "3.6.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "dependencies": { "picomatch": "^2.2.1" }, @@ -15198,10 +18534,13 @@ }, "node_modules/reading-time": { "version": "1.5.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/reading-time/-/reading-time-1.5.0.tgz", + "integrity": "sha512-onYyVhBNr4CmAxFsKS7bz+uTLRakypIe4R+5A824vBSkQy/hB3fZepoVEf8OVAxzLvK+H/jm9TzpI3ETSm64Kg==" }, "node_modules/rechoir": { "version": "0.6.2", + 
"resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", "dependencies": { "resolve": "^1.1.6" }, @@ -15211,7 +18550,8 @@ }, "node_modules/recursive-readdir": { "version": "2.2.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz", + "integrity": "sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==", "dependencies": { "minimatch": "3.0.4" }, @@ -15221,7 +18561,8 @@ }, "node_modules/recursive-readdir/node_modules/minimatch": { "version": "3.0.4", - "license": "ISC", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -15231,8 +18572,9 @@ }, "node_modules/redent": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", "dev": true, - "license": "MIT", "dependencies": { "indent-string": "^4.0.0", "strip-indent": "^3.0.0" @@ -15242,11 +18584,11 @@ } }, "node_modules/redoc": { - "version": "2.0.0-rc.67", - "license": "MIT", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/redoc/-/redoc-2.0.0.tgz", + "integrity": "sha512-rU8iLdAkT89ywOkYk66Mr+IofqaMASlRvTew0dJvopCORMIPUcPMxjlJbJNC6wsn2vvMnpUFLQ/0ISDWn9BWag==", "dependencies": { - "@redocly/openapi-core": "^1.0.0-beta.88", - "@redocly/react-dropdown-aria": "^2.0.11", + "@redocly/openapi-core": "^1.0.0-beta.104", "classnames": "^2.3.1", "decko": "^1.2.0", "dompurify": "^2.2.8", @@ -15254,11 +18596,11 @@ "json-pointer": "^0.6.2", "lunr": "^2.3.9", "mark.js": "^8.11.1", - "marked": "^4.0.10", + "marked": "^4.0.15", "mobx-react": "^7.2.0", - "openapi-sampler": "^1.2.1", + "openapi-sampler": "^1.3.0", "path-browserify": "^1.0.1", - "perfect-scrollbar": "^1.5.1", + "perfect-scrollbar": "^1.5.5", "polished": "^4.1.3", "prismjs": "^1.27.0", "prop-types": "^15.7.2", @@ -15283,14 +18625,16 @@ }, "node_modules/redoc/node_modules/slugify": { "version": "1.4.7", - "license": "MIT", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.4.7.tgz", + "integrity": "sha512-tf+h5W1IrjNm/9rKKj0JU2MDMruiopx0jjVA5zCdBtcGjfp0+c5rHw/zADLC3IeKlGHtVbHtpfzvYA0OYT+HKg==", "engines": { "node": ">=8.0.0" } }, "node_modules/redoc/node_modules/style-loader": { "version": "3.3.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/style-loader/-/style-loader-3.3.1.tgz", + "integrity": "sha512-GPcQ+LDJbrcxHORTRes6Jy2sfvK2kS6hpSfI/fXhPt+spVzxF6LJ1dHLN9zIGmVaaP044YKaIatFaufENRiDoQ==", "engines": { "node": ">= 12.13.0" }, @@ -15304,18 +18648,21 @@ }, "node_modules/reftools": { "version": "1.1.9", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/reftools/-/reftools-1.1.9.tgz", + "integrity": "sha512-OVede/NQE13xBQ+ob5CKd5KyeJYU2YInb1bmV4nRoOfquZPkAkxuOXicSe1PvqIuZZ4kD13sPKBbR7UFDmli6w==", "funding": { "url": "https://github.com/Mermade/oas-kit?sponsor=1" } }, "node_modules/regenerate": { "version": "1.4.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", + "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==" }, "node_modules/regenerate-unicode-properties": { - 
"version": "10.0.1", - "license": "MIT", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz", + "integrity": "sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ==", "dependencies": { "regenerate": "^1.4.2" }, @@ -15324,24 +18671,43 @@ } }, "node_modules/regenerator-runtime": { - "version": "0.13.9", - "license": "MIT" + "version": "0.13.10", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.10.tgz", + "integrity": "sha512-KepLsg4dU12hryUO7bp/axHAKvwGOCV0sGloQtpagJ12ai+ojVDqkeGSiRX1zlq+kjIMZ1t7gpze+26QqtdGqw==" }, "node_modules/regenerator-transform": { "version": "0.15.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.0.tgz", + "integrity": "sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg==", "dependencies": { "@babel/runtime": "^7.8.4" } }, + "node_modules/regexp.prototype.flags": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", + "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "functions-have-names": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/regexpu-core": { - "version": "5.0.1", - "license": "MIT", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.2.1.tgz", + "integrity": "sha512-HrnlNtpvqP1Xkb28tMhBUO2EbyUHdQlsnlAhzWcwHy8WJR53UWr7/MAvqrsQKMbV4qdpv03oTMG8iIhfsPFktQ==", "dependencies": { "regenerate": "^1.4.2", - "regenerate-unicode-properties": "^10.0.1", - "regjsgen": "^0.6.0", - "regjsparser": "^0.8.2", + "regenerate-unicode-properties": "^10.1.0", + "regjsgen": "^0.7.1", + "regjsparser": "^0.9.1", "unicode-match-property-ecmascript": "^2.0.0", "unicode-match-property-value-ecmascript": "^2.0.0" }, @@ -15350,10 +18716,11 @@ } }, "node_modules/registry-auth-token": { - "version": "4.2.1", - "license": "MIT", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.2.tgz", + "integrity": "sha512-PC5ZysNb42zpFME6D/XlIgtNGdTl8bBOCw90xQLVMpzuuubJKYDWFAEuUNc+Cn8Z8724tg2SDhDRrkVEsqfDMg==", "dependencies": { - "rc": "^1.2.8" + "rc": "1.2.8" }, "engines": { "node": ">=6.0.0" @@ -15361,7 +18728,8 @@ }, "node_modules/registry-url": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz", + "integrity": "sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==", "dependencies": { "rc": "^1.2.8" }, @@ -15370,12 +18738,14 @@ } }, "node_modules/regjsgen": { - "version": "0.6.0", - "license": "MIT" + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.7.1.tgz", + "integrity": "sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA==" }, "node_modules/regjsparser": { - "version": "0.8.4", - "license": "BSD-2-Clause", + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz", + "integrity": "sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==", "dependencies": { 
"jsesc": "~0.5.0" }, @@ -15385,26 +18755,69 @@ }, "node_modules/regjsparser/node_modules/jsesc": { "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", "bin": { "jsesc": "bin/jsesc" } }, + "node_modules/rehype-katex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/rehype-katex/-/rehype-katex-5.0.0.tgz", + "integrity": "sha512-ksSuEKCql/IiIadOHiKRMjypva9BLhuwQNascMqaoGLDVd0k2NlE2wMvgZ3rpItzRKCd6vs8s7MFbb8pcR0AEg==", + "dependencies": { + "@types/katex": "^0.11.0", + "hast-util-to-text": "^2.0.0", + "katex": "^0.13.0", + "rehype-parse": "^7.0.0", + "unified": "^9.0.0", + "unist-util-visit": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/rehype-parse": { - "version": "6.0.2", - "license": "MIT", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-7.0.1.tgz", + "integrity": "sha512-fOiR9a9xH+Le19i4fGzIEowAbwG7idy2Jzs4mOrFWBSJ0sNUgy0ev871dwWnbOo371SjgjG4pwzrbgSVrKxecw==", "dependencies": { - "hast-util-from-parse5": "^5.0.0", - "parse5": "^5.0.0", - "xtend": "^4.0.0" + "hast-util-from-parse5": "^6.0.0", + "parse5": "^6.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/rehype-parse/node_modules/hast-util-from-parse5": { + "node_modules/rehype-parse/node_modules/parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" + }, + "node_modules/relateurl": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", + "integrity": "sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/remark-admonitions": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/remark-admonitions/-/remark-admonitions-1.2.1.tgz", + "integrity": "sha512-Ji6p68VDvD+H1oS95Fdx9Ar5WA2wcDA4kwrrhVU7fGctC6+d3uiMICu7w7/2Xld+lnU7/gi+432+rRbup5S8ow==", + "dependencies": { + "rehype-parse": "^6.0.2", + "unified": "^8.4.2", + "unist-util-visit": "^2.0.1" + } + }, + "node_modules/remark-admonitions/node_modules/hast-util-from-parse5": { "version": "5.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-5.0.3.tgz", + "integrity": "sha512-gOc8UB99F6eWVWFtM9jUikjN7QkWxB3nY0df5Z0Zq1/Nkwl5V4hAAsl0tmwlgWl/1shlTF8DnNYLO8X6wRV9pA==", "dependencies": { "ccount": "^1.0.3", "hastscript": "^5.0.0", @@ -15417,9 +18830,10 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/rehype-parse/node_modules/hastscript": { + "node_modules/remark-admonitions/node_modules/hastscript": { "version": "5.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-5.1.2.tgz", + "integrity": "sha512-WlztFuK+Lrvi3EggsqOkQ52rKbxkXL3RwB6t5lwoa8QLMemoWfBuL43eDrwOamJyR7uKQKdmKYaBH1NZBiIRrQ==", "dependencies": { "comma-separated-tokens": "^1.0.0", "hast-util-parse-selector": "^2.0.0", @@ -15431,29 +18845,29 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/rehype-parse/node_modules/parse5": { + "node_modules/remark-admonitions/node_modules/parse5": { "version": "5.1.1", - "license": "MIT" + 
"resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", + "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==" }, - "node_modules/relateurl": { - "version": "0.2.7", - "license": "MIT", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/remark-admonitions": { - "version": "1.2.1", - "license": "MIT", + "node_modules/remark-admonitions/node_modules/rehype-parse": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-6.0.2.tgz", + "integrity": "sha512-0S3CpvpTAgGmnz8kiCyFLGuW5yA4OQhyNTm/nwPopZ7+PI11WnGl1TTWTGv/2hPEe/g2jRLlhVVSsoDH8waRug==", "dependencies": { - "rehype-parse": "^6.0.2", - "unified": "^8.4.2", - "unist-util-visit": "^2.0.1" + "hast-util-from-parse5": "^5.0.0", + "parse5": "^5.0.0", + "xtend": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, "node_modules/remark-admonitions/node_modules/unified": { "version": "8.4.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unified/-/unified-8.4.2.tgz", + "integrity": "sha512-JCrmN13jI4+h9UAyKEoGcDZV+i1E7BLFuG7OsaDvTXI5P0qhHX+vZO/kOhz9jn8HGENDKbwSeB0nVOg4gVStGA==", "dependencies": { "bail": "^1.0.0", "extend": "^3.0.0", @@ -15468,7 +18882,8 @@ }, "node_modules/remark-emoji": { "version": "2.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/remark-emoji/-/remark-emoji-2.2.0.tgz", + "integrity": "sha512-P3cj9s5ggsUvWw5fS2uzCHJMGuXYRb0NnZqYlNecewXt8QBU9n5vW3DUUKOhepS8F9CwdMx9B8a3i7pqFWAI5w==", "dependencies": { "emoticon": "^3.2.0", "node-emoji": "^1.10.0", @@ -15477,7 +18892,17 @@ }, "node_modules/remark-footnotes": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/remark-footnotes/-/remark-footnotes-2.0.0.tgz", + "integrity": "sha512-3Clt8ZMH75Ayjp9q4CorNeyjwIxHFcTkaektplKGl2A1jNGEUey8cKL0ZC5vJwfcD5GFGsNLImLG/NGzWIzoMQ==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-math": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/remark-math/-/remark-math-3.0.1.tgz", + "integrity": "sha512-epT77R/HK0x7NqrWHdSV75uNLwn8g9qTyMqCRCDujL0vj/6T6+yhdrR7mjELWtkse+Fw02kijAaBuVcHBor1+Q==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -15485,7 +18910,8 @@ }, "node_modules/remark-mdx": { "version": "1.6.22", - "license": "MIT", + "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-1.6.22.tgz", + "integrity": "sha512-phMHBJgeV76uyFkH4rvzCftLfKCr2RZuF+/gmVcaKrpsihyzmhXjA0BEMDaPTXG5y8qZOKPVo83NAOX01LPnOQ==", "dependencies": { "@babel/core": "7.12.9", "@babel/helper-plugin-utils": "7.10.4", @@ -15503,7 +18929,8 @@ }, "node_modules/remark-mdx/node_modules/@babel/core": { "version": "7.12.9", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz", + "integrity": "sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==", "dependencies": { "@babel/code-frame": "^7.10.4", "@babel/generator": "^7.12.5", @@ -15532,11 +18959,13 @@ }, "node_modules/remark-mdx/node_modules/@babel/helper-plugin-utils": { "version": "7.10.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", + "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" }, 
"node_modules/remark-mdx/node_modules/@babel/plugin-proposal-object-rest-spread": { "version": "7.12.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.1.tgz", + "integrity": "sha512-s6SowJIjzlhx8o7lsFx5zmY4At6CTtDvgNQDdPzkBQucle58A6b/TTeEBYtyDgmcXjUTM+vE8YOGHZzzbc/ioA==", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4", "@babel/plugin-syntax-object-rest-spread": "^7.8.0", @@ -15548,7 +18977,8 @@ }, "node_modules/remark-mdx/node_modules/@babel/plugin-syntax-jsx": { "version": "7.12.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz", + "integrity": "sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg==", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, @@ -15558,21 +18988,24 @@ }, "node_modules/remark-mdx/node_modules/semver": { "version": "5.7.1", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", "bin": { "semver": "bin/semver" } }, "node_modules/remark-mdx/node_modules/source-map": { "version": "0.5.7", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", "engines": { "node": ">=0.10.0" } }, "node_modules/remark-parse": { "version": "8.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-8.0.3.tgz", + "integrity": "sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q==", "dependencies": { "ccount": "^1.0.0", "collapse-white-space": "^1.0.2", @@ -15598,7 +19031,8 @@ }, "node_modules/remark-squeeze-paragraphs": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/remark-squeeze-paragraphs/-/remark-squeeze-paragraphs-4.0.0.tgz", + "integrity": "sha512-8qRqmL9F4nuLPIgl92XUuxI3pFxize+F1H0e/W3llTk0UsjJaj01+RrirkMw7P21RKe4X6goQhYRSvNWX+70Rw==", "dependencies": { "mdast-squeeze-paragraphs": "^4.0.0" }, @@ -15609,7 +19043,8 @@ }, "node_modules/renderkid": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz", + "integrity": "sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==", "dependencies": { "css-select": "^4.1.3", "dom-converter": "^0.2.0", @@ -15618,42 +19053,139 @@ "strip-ansi": "^6.0.1" } }, + "node_modules/renderkid/node_modules/css-select": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz", + "integrity": "sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==", + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^6.0.1", + "domhandler": "^4.3.1", + "domutils": "^2.8.0", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/renderkid/node_modules/dom-serializer": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", + "integrity": "sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==", + "dependencies": { + "domelementtype": "^2.0.1", + "domhandler": "^4.2.0", + 
"entities": "^2.0.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/renderkid/node_modules/domhandler": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz", + "integrity": "sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==", + "dependencies": { + "domelementtype": "^2.2.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/renderkid/node_modules/domutils": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", + "integrity": "sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", + "dependencies": { + "dom-serializer": "^1.0.1", + "domelementtype": "^2.2.0", + "domhandler": "^4.2.0" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/renderkid/node_modules/entities": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", + "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/renderkid/node_modules/htmlparser2": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz", + "integrity": "sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==", + "funding": [ + "https://github.com/fb55/htmlparser2?sponsor=1", + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "dependencies": { + "domelementtype": "^2.0.1", + "domhandler": "^4.0.0", + "domutils": "^2.5.2", + "entities": "^2.0.0" + } + }, "node_modules/repeat-string": { "version": "1.6.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==", "engines": { "node": ">=0.10" } }, + "node_modules/request-progress": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", + "integrity": "sha512-MnWzEHHaxHO2iWiQuHrUPBi/1WeBf5PkxQqNyNvLl9VAYSdXkP8tQ3pBSeCPD+yw0v0Aq1zosWLz0BdeXpWwZg==", + "dev": true, + "dependencies": { + "throttleit": "^1.0.0" + } + }, "node_modules/require-directory": { "version": "2.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "engines": { "node": ">=0.10.0" } }, "node_modules/require-from-string": { "version": "2.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", "engines": { "node": ">=0.10.0" } }, "node_modules/require-like": { "version": "0.1.2", + "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", + "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", "engines": { "node": "*" } }, "node_modules/requires-port": { "version": "1.0.0", - "license": "MIT" + "resolved": 
"https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" }, "node_modules/resolve": { - "version": "1.22.0", - "license": "MIT", + "version": "1.22.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", "dependencies": { - "is-core-module": "^2.8.1", + "is-core-module": "^2.9.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, @@ -15666,8 +19198,9 @@ }, "node_modules/resolve-cwd": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", "dev": true, - "license": "MIT", "dependencies": { "resolve-from": "^5.0.0" }, @@ -15677,56 +19210,83 @@ }, "node_modules/resolve-cwd/node_modules/resolve-from": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/resolve-from": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "engines": { "node": ">=4" } }, "node_modules/resolve-pathname": { "version": "3.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz", + "integrity": "sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==" }, "node_modules/resolve.exports": { "version": "1.1.0", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-1.1.0.tgz", + "integrity": "sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/responselike": { "version": "1.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", + "integrity": "sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ==", + "dependencies": { + "lowercase-keys": "^1.0.0" + } + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, "dependencies": { - "lowercase-keys": "^1.0.0" + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" } }, "node_modules/retry": { "version": "0.13.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", "engines": { "node": ">= 4" } }, "node_modules/reusify": { "version": "1.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", "engines": { "iojs": ">=1.0.0", "node": ">=0.10.0" } }, 
+ "node_modules/rfdc": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", + "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==", + "dev": true + }, "node_modules/rimraf": { "version": "3.0.2", - "license": "ISC", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "dependencies": { "glob": "^7.1.3" }, @@ -15739,7 +19299,8 @@ }, "node_modules/ripemd160": { "version": "2.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", + "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", "dependencies": { "hash-base": "^3.0.0", "inherits": "^2.0.1" @@ -15747,11 +19308,13 @@ }, "node_modules/rtl-detect": { "version": "1.0.4", - "license": "BSD-3-Clause" + "resolved": "https://registry.npmjs.org/rtl-detect/-/rtl-detect-1.0.4.tgz", + "integrity": "sha512-EBR4I2VDSSYr7PkBmFy04uhycIpDKp+21p/jARYXlCSjQksTBQcJ0HFUPOO79EPPH5JS6VAhiIQbycf0O3JAxQ==" }, "node_modules/rtlcss": { "version": "3.5.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/rtlcss/-/rtlcss-3.5.0.tgz", + "integrity": "sha512-wzgMaMFHQTnyi9YOwsx9LjOxYXJPzS8sYnFaKm6R5ysvTkwzHiB0vxnbHwchHQT65PTdBjDG21/kQBWI7q9O7A==", "dependencies": { "find-up": "^5.0.0", "picocolors": "^1.0.0", @@ -15764,7 +19327,8 @@ }, "node_modules/rtlcss/node_modules/find-up": { "version": "5.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" @@ -15778,7 +19342,8 @@ }, "node_modules/rtlcss/node_modules/locate-path": { "version": "6.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dependencies": { "p-locate": "^5.0.0" }, @@ -15791,7 +19356,8 @@ }, "node_modules/rtlcss/node_modules/p-limit": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dependencies": { "yocto-queue": "^0.1.0" }, @@ -15804,7 +19370,8 @@ }, "node_modules/rtlcss/node_modules/p-locate": { "version": "5.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dependencies": { "p-limit": "^3.0.2" }, @@ -15817,6 +19384,8 @@ }, "node_modules/run-parallel": { "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", "funding": [ { "type": "github", @@ -15831,34 +19400,65 @@ "url": "https://feross.org/support" } ], - "license": "MIT", "dependencies": { "queue-microtask": "^1.2.2" } }, "node_modules/rxjs": { - "version": "7.5.5", - "license": "Apache-2.0", + "version": "7.5.7", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.5.7.tgz", + "integrity": 
"sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA==", "dependencies": { "tslib": "^2.1.0" } }, "node_modules/safe-buffer": { - "version": "5.1.2", - "license": "MIT" + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safe-regex-test": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", + "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "is-regex": "^1.1.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/safer-buffer": { "version": "2.1.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "node_modules/sax": { "version": "1.2.4", - "license": "ISC" + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" }, "node_modules/saxes": { "version": "5.0.1", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz", + "integrity": "sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==", "dev": true, - "license": "ISC", "dependencies": { "xmlchars": "^2.2.0" }, @@ -15868,7 +19468,8 @@ }, "node_modules/scheduler": { "version": "0.20.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz", + "integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==", "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" @@ -15876,7 +19477,8 @@ }, "node_modules/schema-utils": { "version": "2.7.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz", + "integrity": "sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==", "dependencies": { "@types/json-schema": "^7.0.5", "ajv": "^6.12.4", @@ -15892,7 +19494,8 @@ }, "node_modules/section-matter": { "version": "1.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", + "integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", "dependencies": { "extend-shallow": "^2.0.1", "kind-of": "^6.0.0" @@ -15903,11 +19506,13 @@ }, "node_modules/select-hose": { "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", + "integrity": "sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==" }, "node_modules/selfsigned": { - "version": "2.0.1", - "license": "MIT", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-2.1.1.tgz", + "integrity": 
"sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ==", "dependencies": { "node-forge": "^1" }, @@ -15916,8 +19521,9 @@ } }, "node_modules/semver": { - "version": "7.3.7", - "license": "ISC", + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", "dependencies": { "lru-cache": "^6.0.0" }, @@ -15930,7 +19536,8 @@ }, "node_modules/semver-diff": { "version": "3.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", + "integrity": "sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==", "dependencies": { "semver": "^6.3.0" }, @@ -15940,14 +19547,16 @@ }, "node_modules/semver-diff/node_modules/semver": { "version": "6.3.0", - "license": "ISC", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "bin": { "semver": "bin/semver.js" } }, "node_modules/send": { "version": "0.18.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", + "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", "dependencies": { "debug": "2.6.9", "depd": "2.0.0", @@ -15969,36 +19578,42 @@ }, "node_modules/send/node_modules/debug": { "version": "2.6.9", - "license": "MIT", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/send/node_modules/debug/node_modules/ms": { "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/send/node_modules/ms": { "version": "2.1.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/send/node_modules/range-parser": { "version": "1.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", "engines": { "node": ">= 0.6" } }, "node_modules/serialize-javascript": { "version": "6.0.0", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", "dependencies": { "randombytes": "^2.1.0" } }, "node_modules/serve-handler": { "version": "6.1.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz", + "integrity": "sha512-FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w==", "dependencies": { "bytes": "3.0.0", "content-disposition": "0.5.2", @@ -16012,14 +19627,16 @@ }, "node_modules/serve-handler/node_modules/mime-db": { "version": "1.33.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz", + "integrity": 
"sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==", "engines": { "node": ">= 0.6" } }, "node_modules/serve-handler/node_modules/mime-types": { "version": "2.1.18", - "license": "MIT", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", + "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", "dependencies": { "mime-db": "~1.33.0" }, @@ -16029,7 +19646,8 @@ }, "node_modules/serve-handler/node_modules/minimatch": { "version": "3.0.4", - "license": "ISC", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -16039,11 +19657,13 @@ }, "node_modules/serve-handler/node_modules/path-to-regexp": { "version": "2.2.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz", + "integrity": "sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ==" }, "node_modules/serve-index": { "version": "1.9.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", + "integrity": "sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==", "dependencies": { "accepts": "~1.3.4", "batch": "0.6.1", @@ -16059,21 +19679,24 @@ }, "node_modules/serve-index/node_modules/debug": { "version": "2.6.9", - "license": "MIT", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { "ms": "2.0.0" } }, "node_modules/serve-index/node_modules/depd": { "version": "1.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", "engines": { "node": ">= 0.6" } }, "node_modules/serve-index/node_modules/http-errors": { "version": "1.6.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "integrity": "sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==", "dependencies": { "depd": "~1.1.2", "inherits": "2.0.3", @@ -16086,26 +19709,31 @@ }, "node_modules/serve-index/node_modules/inherits": { "version": "2.0.3", - "license": "ISC" + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" }, "node_modules/serve-index/node_modules/ms": { "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/serve-index/node_modules/setprototypeof": { "version": "1.1.0", - "license": "ISC" + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" }, "node_modules/serve-index/node_modules/statuses": { "version": "1.5.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": 
"sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==", "engines": { "node": ">= 0.6" } }, "node_modules/serve-static": { "version": "1.15.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", + "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", "dependencies": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", @@ -16116,21 +19744,20 @@ "node": ">= 0.8.0" } }, - "node_modules/set-blocking": { - "version": "2.0.0", - "license": "ISC" - }, "node_modules/setimmediate": { "version": "1.0.5", - "license": "MIT" + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==" }, "node_modules/setprototypeof": { "version": "1.2.0", - "license": "ISC" + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "node_modules/sha.js": { "version": "2.4.11", - "license": "(MIT AND BSD-3-Clause)", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", "dependencies": { "inherits": "^2.0.1", "safe-buffer": "^5.0.1" @@ -16141,7 +19768,8 @@ }, "node_modules/shallow-clone": { "version": "3.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", "dependencies": { "kind-of": "^6.0.2" }, @@ -16151,17 +19779,19 @@ }, "node_modules/shallowequal": { "version": "1.1.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz", + "integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==" }, "node_modules/sharp": { - "version": "0.30.4", + "version": "0.30.7", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.30.7.tgz", + "integrity": "sha512-G+MY2YW33jgflKPTXXptVO28HvNOo9G3j0MybYAHeEmby+QuD2U98dT6ueht9cv/XDqZspSpIhoSW+BAKJ7Hig==", "hasInstallScript": true, - "license": "Apache-2.0", "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.1", - "node-addon-api": "^4.3.0", - "prebuild-install": "^7.0.1", + "node-addon-api": "^5.0.0", + "prebuild-install": "^7.1.1", "semver": "^7.3.7", "simple-get": "^4.0.1", "tar-fs": "^2.1.1", @@ -16176,7 +19806,8 @@ }, "node_modules/sharp/node_modules/color": { "version": "4.2.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", + "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" @@ -16187,7 +19818,8 @@ }, "node_modules/sharp/node_modules/color-convert": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { "color-name": "~1.1.4" }, @@ -16197,11 +19829,13 @@ }, "node_modules/sharp/node_modules/color-name": { "version": "1.1.4", - "license": "MIT" + "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/shebang-command": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dependencies": { "shebang-regex": "^3.0.0" }, @@ -16211,18 +19845,24 @@ }, "node_modules/shebang-regex": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "engines": { "node": ">=8" } }, "node_modules/shell-quote": { - "version": "1.7.3", - "license": "MIT" + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.4.tgz", + "integrity": "sha512-8o/QEhSSRb1a5i7TFR0iM4G16Z0vYB2OQVs4G3aAFXjn3T6yEx8AZxy1PgDF7I00LZHYA3WxaSYIf5e5sAX8Rw==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/shelljs": { "version": "0.8.5", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", + "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", "dependencies": { "glob": "^7.0.0", "interpret": "^1.0.0", @@ -16237,7 +19877,8 @@ }, "node_modules/should": { "version": "13.2.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/should/-/should-13.2.3.tgz", + "integrity": "sha512-ggLesLtu2xp+ZxI+ysJTmNjh2U0TsC+rQ/pfED9bUZZ4DKefP27D+7YJVVTvKsmjLpIi9jAa7itwDGkDDmt1GQ==", "dependencies": { "should-equal": "^2.0.0", "should-format": "^3.0.3", @@ -16248,14 +19889,16 @@ }, "node_modules/should-equal": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/should-equal/-/should-equal-2.0.0.tgz", + "integrity": "sha512-ZP36TMrK9euEuWQYBig9W55WPC7uo37qzAEmbjHz4gfyuXrEUgF8cUvQVO+w+d3OMfPvSRQJ22lSm8MQJ43LTA==", "dependencies": { "should-type": "^1.4.0" } }, "node_modules/should-format": { "version": "3.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/should-format/-/should-format-3.0.3.tgz", + "integrity": "sha512-hZ58adtulAk0gKtua7QxevgUaXTTXxIi8t41L3zo9AHvjXO1/7sdLECuHeIN2SRtYXpNkmhoUP2pdeWgricQ+Q==", "dependencies": { "should-type": "^1.3.0", "should-type-adaptors": "^1.0.1" @@ -16263,11 +19906,13 @@ }, "node_modules/should-type": { "version": "1.4.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/should-type/-/should-type-1.4.0.tgz", + "integrity": "sha512-MdAsTu3n25yDbIe1NeN69G4n6mUnJGtSJHygX3+oN0ZbO3DTiATnf7XnYJdGT42JCXurTb1JI0qOBR65shvhPQ==" }, "node_modules/should-type-adaptors": { "version": "1.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/should-type-adaptors/-/should-type-adaptors-1.1.0.tgz", + "integrity": "sha512-JA4hdoLnN+kebEp2Vs8eBe9g7uy0zbRo+RMcU0EsNy+R+k049Ki+N5tT5Jagst2g7EAja+euFuoXFCa8vIklfA==", "dependencies": { "should-type": "^1.3.0", "should-util": "^1.0.0" @@ -16275,11 +19920,13 @@ }, "node_modules/should-util": { "version": "1.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/should-util/-/should-util-1.0.1.tgz", + "integrity": "sha512-oXF8tfxx5cDk8r2kYqlkUJzZpDBqVY/II2WhvU0n9Y3XYvAYRmeaf1PvvIvTgPnv4KJ+ES5M0PyDq5Jp+Ygy2g==" }, "node_modules/side-channel": { "version": "1.0.4", - 
"license": "MIT", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", "dependencies": { "call-bind": "^1.0.0", "get-intrinsic": "^1.0.2", @@ -16291,10 +19938,13 @@ }, "node_modules/signal-exit": { "version": "3.0.7", - "license": "ISC" + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" }, "node_modules/simple-concat": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", "funding": [ { "type": "github", @@ -16308,11 +19958,12 @@ "type": "consulting", "url": "https://feross.org/support" } - ], - "license": "MIT" + ] }, "node_modules/simple-get": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", "funding": [ { "type": "github", @@ -16327,7 +19978,6 @@ "url": "https://feross.org/support" } ], - "license": "MIT", "dependencies": { "decompress-response": "^6.0.0", "once": "^1.3.1", @@ -16336,23 +19986,27 @@ }, "node_modules/simple-html-tokenizer": { "version": "0.1.1", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/simple-html-tokenizer/-/simple-html-tokenizer-0.1.1.tgz", + "integrity": "sha512-Mc/gH3RvlKvB/gkp9XwgDKEWrSYyefIJPGG8Jk1suZms/rISdUuVEMx5O1WBnTWaScvxXDvGJrZQWblUmQHjkQ==", + "dev": true }, "node_modules/simple-swizzle": { "version": "0.2.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", "dependencies": { "is-arrayish": "^0.3.1" } }, "node_modules/simple-swizzle/node_modules/is-arrayish": { "version": "0.3.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" }, "node_modules/sirv": { "version": "1.0.19", - "license": "MIT", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-1.0.19.tgz", + "integrity": "sha512-JuLThK3TnZG1TAKDwNIqNq6QA2afLOCcm+iE8D1Kj3GA40pSPsxQjjJl0J8X3tsR7T+CP1GavpzLwYkgVLWrZQ==", "dependencies": { "@polka/url": "^1.0.0-next.20", "mrmime": "^1.0.0", @@ -16364,11 +20018,13 @@ }, "node_modules/sisteransi": { "version": "1.0.5", - "license": "MIT" + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==" }, "node_modules/sitemap": { "version": "7.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/sitemap/-/sitemap-7.1.1.tgz", + "integrity": "sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg==", "dependencies": { "@types/node": "^17.0.5", "@types/sax": "^1.2.1", @@ -16383,73 +20039,112 @@ "npm": ">=5.6.0" } }, + "node_modules/sitemap/node_modules/@types/node": { + "version": "17.0.45", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", + "integrity": 
"sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==" + }, "node_modules/slash": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/slice-ansi/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" } }, + "node_modules/slice-ansi/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, "node_modules/slugify": { "version": "1.6.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.5.tgz", + "integrity": "sha512-8mo9bslnBO3tr5PEVFzMPIWwWnipGS0xVbYf65zxDqfNwmzYn1LpiKNrR6DlClusuvo+hDHd1zKpmfAe83NQSQ==", "engines": { "node": ">=8.0.0" } }, "node_modules/sockjs": { "version": "0.3.24", - "license": "MIT", + "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz", + "integrity": "sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==", "dependencies": { "faye-websocket": "^0.11.3", "uuid": "^8.3.2", "websocket-driver": "^0.7.4" } }, - "node_modules/sockjs/node_modules/uuid": { - "version": "8.3.2", - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, "node_modules/sort-css-media-queries": { - "version": "2.0.4", - "license": "MIT", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/sort-css-media-queries/-/sort-css-media-queries-2.1.0.tgz", + "integrity": "sha512-IeWvo8NkNiY2vVYdPa27MCQiR0MN0M80johAYFVxWWXQ44KU84WNxjslwBHmc/7ZL2ccwkM7/e6S5aiKZXm7jA==", "engines": { "node": ">= 6.3.0" } }, - "node_modules/source-list-map": { - "version": "2.0.1", - "license": "MIT" - }, "node_modules/source-map": { "version": "0.6.1", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "engines": { "node": ">=0.10.0" } }, "node_modules/source-map-js": { "version": "1.0.2", - "license": "BSD-3-Clause", + "resolved": 
"https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", "engines": { "node": ">=0.10.0" } }, - "node_modules/source-map-resolve": { - "version": "0.6.0", - "dev": true, - "license": "MIT", - "dependencies": { - "atob": "^2.1.2", - "decode-uri-component": "^0.2.0" - } - }, "node_modules/source-map-support": { "version": "0.5.21", - "license": "MIT", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" @@ -16457,7 +20152,8 @@ }, "node_modules/space-separated-tokens": { "version": "1.1.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz", + "integrity": "sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -16465,7 +20161,8 @@ }, "node_modules/spdy": { "version": "4.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz", + "integrity": "sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==", "dependencies": { "debug": "^4.1.0", "handle-thing": "^2.0.0", @@ -16479,7 +20176,8 @@ }, "node_modules/spdy-transport": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz", + "integrity": "sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==", "dependencies": { "debug": "^4.1.0", "detect-node": "^2.0.4", @@ -16491,16 +20189,50 @@ }, "node_modules/sprintf-js": { "version": "1.0.3", - "license": "BSD-3-Clause" + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" + }, + "node_modules/sshpk": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", + "dev": true, + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "bin": { + "sshpk-conv": "bin/sshpk-conv", + "sshpk-sign": "bin/sshpk-sign", + "sshpk-verify": "bin/sshpk-verify" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ssr-window": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/ssr-window/-/ssr-window-4.0.2.tgz", + "integrity": "sha512-ISv/Ch+ig7SOtw7G2+qkwfVASzazUnvlDTwypdLoPoySv+6MqlOV10VwPSE6EWkGjhW50lUmghPmpYZXMu/+AQ==" }, "node_modules/stable": { "version": "0.1.8", - "license": "MIT" + "resolved": "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz", + "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==", + "deprecated": "Modern JS already guarantees Array#sort() is a stable sort, so this library is deprecated. 
See the compatibility table on MDN: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort#browser_compatibility" }, "node_modules/stack-utils": { "version": "2.0.5", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", + "integrity": "sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==", "dev": true, - "license": "MIT", "dependencies": { "escape-string-regexp": "^2.0.0" }, @@ -16510,15 +20242,17 @@ }, "node_modules/stack-utils/node_modules/escape-string-regexp": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/state-toggle": { "version": "1.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.3.tgz", + "integrity": "sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -16526,21 +20260,26 @@ }, "node_modules/statuses": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", "engines": { "node": ">= 0.8" } }, "node_modules/std-env": { - "version": "3.1.1", - "license": "MIT" + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.3.0.tgz", + "integrity": "sha512-cNNS+VYsXIs5gI6gJipO4qZ8YYT274JHvNnQ1/R/x8Q8mdP0qj0zoMchRXmBNPqp/0eOEhX+3g7g6Fgb7meLIQ==" }, "node_modules/stickyfill": { - "version": "1.1.1" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/stickyfill/-/stickyfill-1.1.1.tgz", + "integrity": "sha512-GCp7vHAfpao+Qh/3Flh9DXEJ/qSi0KJwJw6zYlZOtRYXWUIpMM6mC2rIep/dK8RQqwW0KxGJIllmjPIBOGN8AA==" }, "node_modules/stream-browserify": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-3.0.0.tgz", + "integrity": "sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA==", "dependencies": { "inherits": "~2.0.4", "readable-stream": "^3.5.0" @@ -16548,7 +20287,8 @@ }, "node_modules/stream-http": { "version": "3.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/stream-http/-/stream-http-3.2.0.tgz", + "integrity": "sha512-Oq1bLqisTyK3TSCXpPbT4sdeYNdmyZJv1LxpEm2vu1ZhK89kSE5YXwZc3cWk0MagGaKriBh9mCFbVGtO+vY29A==", "dependencies": { "builtin-status-codes": "^3.0.0", "inherits": "^2.0.4", @@ -16558,33 +20298,17 @@ }, "node_modules/string_decoder": { "version": "1.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", "dependencies": { "safe-buffer": "~5.2.0" } }, - "node_modules/string_decoder/node_modules/safe-buffer": { - "version": "5.2.1", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, "node_modules/string-length": { "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", "dev": true, - "license": "MIT", "dependencies": { "char-regex": "^1.0.2", "strip-ansi": "^6.0.0" @@ -16595,7 +20319,8 @@ }, "node_modules/string-width": { "version": "5.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", @@ -16610,7 +20335,8 @@ }, "node_modules/string-width/node_modules/ansi-regex": { "version": "6.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", "engines": { "node": ">=12" }, @@ -16620,7 +20346,8 @@ }, "node_modules/string-width/node_modules/strip-ansi": { "version": "7.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", + "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -16632,22 +20359,26 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.4", - "license": "MIT", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", + "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3" + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trimstart": { - "version": "1.0.4", - "license": "MIT", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", + "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3" + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -16655,7 +20386,8 @@ }, "node_modules/stringify-object": { "version": "3.3.0", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", + "integrity": "sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==", "dependencies": { "get-own-enumerable-property-symbols": "^3.0.0", "is-obj": "^1.0.1", @@ -16667,7 +20399,8 @@ }, "node_modules/strip-ansi": { "version": "6.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dependencies": { "ansi-regex": "^5.0.1" }, @@ -16677,30 +20410,34 @@ }, "node_modules/strip-bom": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/strip-bom-string": { "version": "1.0.0", - 
"license": "MIT", + "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", + "integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==", "engines": { "node": ">=0.10.0" } }, "node_modules/strip-final-newline": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", "engines": { "node": ">=6" } }, "node_modules/strip-indent": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", "dev": true, - "license": "MIT", "dependencies": { "min-indent": "^1.0.0" }, @@ -16710,7 +20447,8 @@ }, "node_modules/strip-json-comments": { "version": "3.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "engines": { "node": ">=8" }, @@ -16720,8 +20458,9 @@ }, "node_modules/style-loader": { "version": "1.3.0", + "resolved": "https://registry.npmjs.org/style-loader/-/style-loader-1.3.0.tgz", + "integrity": "sha512-V7TCORko8rs9rIqkSrlMfkqA63DfoGBBJmK1kKGCcSi+BWb4cqz0SRsnp4l6rU5iwOEd0/2ePv68SV22VXon4Q==", "dev": true, - "license": "MIT", "dependencies": { "loader-utils": "^2.0.0", "schema-utils": "^2.7.0" @@ -16739,14 +20478,16 @@ }, "node_modules/style-to-object": { "version": "0.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz", + "integrity": "sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==", "dependencies": { "inline-style-parser": "0.1.1" } }, "node_modules/styled-components": { "version": "5.3.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.3.tgz", + "integrity": "sha512-++4iHwBM7ZN+x6DtPPWkCI4vdtwumQ+inA/DdAsqYd4SVgUKJie5vXyzotA00ttcFdQkCng7zc6grwlfIfw+lw==", "dependencies": { "@babel/helper-module-imports": "^7.0.0", "@babel/traverse": "^7.4.5", @@ -16774,7 +20515,8 @@ }, "node_modules/stylehacks": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-5.1.0.tgz", + "integrity": "sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q==", "dependencies": { "browserslist": "^4.16.6", "postcss-selector-parser": "^6.0.4" @@ -16788,7 +20530,8 @@ }, "node_modules/supports-color": { "version": "5.5.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "dependencies": { "has-flag": "^3.0.0" }, @@ -16797,9 +20540,10 @@ } }, "node_modules/supports-hyperlinks": { - "version": "2.2.0", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz", + "integrity": "sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0", "supports-color": "^7.0.0" @@ -16810,16 +20554,18 @@ }, 
"node_modules/supports-hyperlinks/node_modules/has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/supports-hyperlinks/node_modules/supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -16829,7 +20575,8 @@ }, "node_modules/supports-preserve-symlinks-flag": { "version": "1.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", "engines": { "node": ">= 0.4" }, @@ -16839,8 +20586,9 @@ }, "node_modules/svg-inline-loader": { "version": "0.8.2", + "resolved": "https://registry.npmjs.org/svg-inline-loader/-/svg-inline-loader-0.8.2.tgz", + "integrity": "sha512-kbrcEh5n5JkypaSC152eGfGcnT4lkR0eSfvefaUJkLqgGjRQJyKDvvEE/CCv5aTSdfXuc+N98w16iAojhShI3g==", "dev": true, - "license": "MIT", "dependencies": { "loader-utils": "^1.1.0", "object-assign": "^4.0.1", @@ -16849,8 +20597,9 @@ }, "node_modules/svg-inline-loader/node_modules/json5": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", "dev": true, - "license": "MIT", "dependencies": { "minimist": "^1.2.0" }, @@ -16860,8 +20609,9 @@ }, "node_modules/svg-inline-loader/node_modules/loader-utils": { "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.0.tgz", + "integrity": "sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==", "dev": true, - "license": "MIT", "dependencies": { "big.js": "^5.2.2", "emojis-list": "^3.0.0", @@ -16873,101 +20623,104 @@ }, "node_modules/svg-parser": { "version": "2.0.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz", + "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==" }, "node_modules/svgo": { - "version": "1.3.2", - "license": "MIT", - "dependencies": { - "chalk": "^2.4.1", - "coa": "^2.0.2", - "css-select": "^2.0.0", - "css-select-base-adapter": "^0.1.1", - "css-tree": "1.0.0-alpha.37", - "csso": "^4.0.2", - "js-yaml": "^3.13.1", - "mkdirp": "~0.5.1", - "object.values": "^1.1.0", - "sax": "~1.2.4", - "stable": "^0.1.8", - "unquote": "~1.1.1", - "util.promisify": "~1.0.0" + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-2.8.0.tgz", + "integrity": "sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg==", + "dependencies": { + "@trysound/sax": "0.2.0", + "commander": "^7.2.0", + "css-select": "^4.1.3", + "css-tree": "^1.1.3", + "csso": "^4.2.0", + "picocolors": "^1.0.0", + "stable": "^0.1.8" }, "bin": { "svgo": "bin/svgo" }, "engines": { - "node": ">=4.0.0" + "node": ">=10.13.0" } }, - "node_modules/svgo/node_modules/argparse": { - "version": "1.0.10", - "license": "MIT", - "dependencies": { - "sprintf-js": "~1.0.2" 
+ "node_modules/svgo/node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "engines": { + "node": ">= 10" } }, "node_modules/svgo/node_modules/css-select": { - "version": "2.1.0", - "license": "BSD-2-Clause", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz", + "integrity": "sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==", "dependencies": { "boolbase": "^1.0.0", - "css-what": "^3.2.1", - "domutils": "^1.7.0", - "nth-check": "^1.0.2" - } - }, - "node_modules/svgo/node_modules/css-what": { - "version": "3.4.2", - "license": "BSD-2-Clause", - "engines": { - "node": ">= 6" + "css-what": "^6.0.1", + "domhandler": "^4.3.1", + "domutils": "^2.8.0", + "nth-check": "^2.0.1" }, "funding": { "url": "https://github.com/sponsors/fb55" } }, "node_modules/svgo/node_modules/dom-serializer": { - "version": "0.2.2", - "license": "MIT", + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", + "integrity": "sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==", "dependencies": { "domelementtype": "^2.0.1", + "domhandler": "^4.2.0", "entities": "^2.0.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" } }, - "node_modules/svgo/node_modules/domutils": { - "version": "1.7.0", - "license": "BSD-2-Clause", + "node_modules/svgo/node_modules/domhandler": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz", + "integrity": "sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==", "dependencies": { - "dom-serializer": "0", - "domelementtype": "1" + "domelementtype": "^2.2.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" } }, - "node_modules/svgo/node_modules/domutils/node_modules/domelementtype": { - "version": "1.3.1", - "license": "BSD-2-Clause" - }, - "node_modules/svgo/node_modules/js-yaml": { - "version": "3.14.1", - "license": "MIT", + "node_modules/svgo/node_modules/domutils": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", + "integrity": "sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" + "dom-serializer": "^1.0.1", + "domelementtype": "^2.2.0", + "domhandler": "^4.2.0" }, - "bin": { - "js-yaml": "bin/js-yaml.js" + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" } }, - "node_modules/svgo/node_modules/nth-check": { - "version": "1.0.2", - "license": "BSD-2-Clause", - "dependencies": { - "boolbase": "~1.0.0" + "node_modules/svgo/node_modules/entities": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", + "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" } }, "node_modules/swagger2openapi": { "version": "7.0.8", - "license": "BSD-3-Clause", + "resolved": "https://registry.npmjs.org/swagger2openapi/-/swagger2openapi-7.0.8.tgz", + "integrity": 
"sha512-upi/0ZGkYgEcLeGieoz8gT74oWHA0E7JivX7aN9mAf+Tc7BQoRBvnIGHoPDw+f9TXTW4s6kGYCZJtauP6OYp7g==", "dependencies": { "call-me-maybe": "^1.0.1", "node-fetch": "^2.6.1", @@ -16990,13 +20743,58 @@ "url": "https://github.com/Mermade/oas-kit?sponsor=1" } }, + "node_modules/swagger2openapi/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/swagger2openapi/node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/swagger2openapi/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/swagger2openapi/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, "node_modules/swagger2openapi/node_modules/emoji-regex": { "version": "8.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/swagger2openapi/node_modules/string-width": { "version": "4.2.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -17006,11 +20804,28 @@ "node": ">=8" } }, + "node_modules/swagger2openapi/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/swagger2openapi/node_modules/yargs": { - "version": "17.4.1", - "license": "MIT", + "version": "17.6.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.0.tgz", + "integrity": "sha512-8H/wTDqlSwoSnScvV2N/JHfLWOKuh5MVla9hqLjK3nsfyy6Y4kDSYSvkU5YCUEPOSnRXfIyx3Sq+B/IWudTo4g==", "dependencies": { - "cliui": "^7.0.2", + "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", @@ -17023,27 +20838,54 @@ } }, "node_modules/swagger2openapi/node_modules/yargs-parser": { - "version": "21.0.1", - "license": "ISC", + 
"version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", "engines": { "node": ">=12" } }, + "node_modules/swiper": { + "version": "8.4.4", + "resolved": "https://registry.npmjs.org/swiper/-/swiper-8.4.4.tgz", + "integrity": "sha512-jA/8BfOZwT8PqPSnMX0TENZYitXEhNa7ZSNj1Diqh5LZyUJoBQaZcqAiPQ/PIg1+IPaRn/V8ZYVb0nxHMh51yw==", + "funding": [ + { + "type": "patreon", + "url": "https://www.patreon.com/swiperjs" + }, + { + "type": "open_collective", + "url": "http://opencollective.com/swiper" + } + ], + "hasInstallScript": true, + "dependencies": { + "dom7": "^4.0.4", + "ssr-window": "^4.0.2" + }, + "engines": { + "node": ">= 4.7.0" + } + }, "node_modules/symbol-tree": { "version": "3.2.4", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", + "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", + "dev": true }, "node_modules/tapable": { "version": "2.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", "engines": { "node": ">=6" } }, "node_modules/tar-fs": { "version": "2.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", + "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==", "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", @@ -17053,7 +20895,8 @@ }, "node_modules/tar-stream": { "version": "2.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", @@ -17067,8 +20910,9 @@ }, "node_modules/terminal-link": { "version": "2.1.1", + "resolved": "https://registry.npmjs.org/terminal-link/-/terminal-link-2.1.1.tgz", + "integrity": "sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==", "dev": true, - "license": "MIT", "dependencies": { "ansi-escapes": "^4.2.1", "supports-hyperlinks": "^2.0.0" @@ -17081,12 +20925,13 @@ } }, "node_modules/terser": { - "version": "5.13.1", - "license": "BSD-2-Clause", + "version": "5.15.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.15.1.tgz", + "integrity": "sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw==", "dependencies": { + "@jridgewell/source-map": "^0.3.2", "acorn": "^8.5.0", "commander": "^2.20.0", - "source-map": "~0.8.0-beta.0", "source-map-support": "~0.5.20" }, "bin": { @@ -17097,14 +20942,15 @@ } }, "node_modules/terser-webpack-plugin": { - "version": "5.3.1", - "license": "MIT", + "version": "5.3.6", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz", + "integrity": "sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ==", "dependencies": { + "@jridgewell/trace-mapping": "^0.3.14", "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.0", - "source-map": "^0.6.1", - "terser": "^5.7.2" + "terser": "^5.14.1" }, "engines": { "node": ">= 10.13.0" @@ -17130,7 +20976,8 @@ }, 
"node_modules/terser-webpack-plugin/node_modules/schema-utils": { "version": "3.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", "dependencies": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", @@ -17146,42 +20993,14 @@ }, "node_modules/terser/node_modules/commander": { "version": "2.20.3", - "license": "MIT" - }, - "node_modules/terser/node_modules/source-map": { - "version": "0.8.0-beta.0", - "license": "BSD-3-Clause", - "dependencies": { - "whatwg-url": "^7.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/terser/node_modules/tr46": { - "version": "1.0.1", - "license": "MIT", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/terser/node_modules/webidl-conversions": { - "version": "4.0.2", - "license": "BSD-2-Clause" - }, - "node_modules/terser/node_modules/whatwg-url": { - "version": "7.1.0", - "license": "MIT", - "dependencies": { - "lodash.sortby": "^4.7.0", - "tr46": "^1.0.1", - "webidl-conversions": "^4.0.2" - } + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" }, "node_modules/test-exclude": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", "dev": true, - "license": "ISC", "dependencies": { "@istanbuljs/schema": "^0.1.2", "glob": "^7.1.4", @@ -17193,20 +21012,36 @@ }, "node_modules/text-table": { "version": "0.2.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" }, "node_modules/throat": { "version": "6.0.1", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/throat/-/throat-6.0.1.tgz", + "integrity": "sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w==", + "dev": true + }, + "node_modules/throttleit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", + "integrity": "sha512-rkTVqu6IjfQ/6+uNuuc3sZek4CEYxTJom3IktzgdSxcZqdARuebbA/f4QmAxMQIxqq9ZLEUkSYqvuk1I6VKq4g==", + "dev": true + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true }, "node_modules/thunky": { "version": "1.1.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", + "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, "node_modules/timers-browserify": { "version": "2.0.12", - "license": "MIT", + "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.12.tgz", + "integrity": "sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==", "dependencies": { "setimmediate": "^1.0.4" }, @@ -17216,45 +21051,65 @@ }, "node_modules/timm": { "version": "1.7.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/timm/-/timm-1.7.1.tgz", + "integrity": 
"sha512-IjZc9KIotudix8bMaBW6QvMuq64BrJWFs1+4V0lXwWGQZwH+LnX87doAYhem4caOEusRP9/g6jVDQmZ8XOk1nw==" }, "node_modules/tiny-invariant": { - "version": "1.2.0", - "license": "MIT" + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.1.tgz", + "integrity": "sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw==" }, "node_modules/tiny-warning": { "version": "1.0.3", - "license": "MIT" + "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", + "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" }, "node_modules/tinycolor2": { "version": "1.4.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/tinycolor2/-/tinycolor2-1.4.2.tgz", + "integrity": "sha512-vJhccZPs965sV/L2sU4oRQVAos0pQXwsvTLkWYdqJ+a8Q5kPFzJTuOFwy7UniPli44NKQGAglksjvOcpo95aZA==", "engines": { "node": "*" } }, + "node_modules/tmp": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", + "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "dev": true, + "dependencies": { + "rimraf": "^3.0.0" + }, + "engines": { + "node": ">=8.17.0" + } + }, "node_modules/tmpl": { "version": "1.0.5", - "dev": true, - "license": "BSD-3-Clause" + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true }, "node_modules/to-fast-properties": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", "engines": { "node": ">=4" } }, "node_modules/to-readable-stream": { "version": "1.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz", + "integrity": "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==", "engines": { "node": ">=6" } }, "node_modules/to-regex-range": { "version": "5.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dependencies": { "is-number": "^7.0.0" }, @@ -17264,43 +21119,38 @@ }, "node_modules/toidentifier": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", "engines": { "node": ">=0.6" } }, "node_modules/totalist": { "version": "1.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-1.1.0.tgz", + "integrity": "sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g==", "engines": { "node": ">=6" } }, "node_modules/tough-cookie": { - "version": "4.0.0", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", "dev": true, - "license": "BSD-3-Clause", "dependencies": { - "psl": "^1.1.33", - "punycode": "^2.1.1", - "universalify": "^0.1.2" + "psl": 
"^1.1.28", + "punycode": "^2.1.1" }, "engines": { - "node": ">=6" - } - }, - "node_modules/tough-cookie/node_modules/universalify": { - "version": "0.1.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4.0.0" + "node": ">=0.8" } }, "node_modules/tr46": { "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz", + "integrity": "sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==", "dev": true, - "license": "MIT", "dependencies": { "punycode": "^2.1.1" }, @@ -17309,11 +21159,14 @@ } }, "node_modules/trim": { - "version": "0.0.1" + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz", + "integrity": "sha512-YzQV+TZg4AxpKxaTHK3c3D+kRDCGVEE7LemdlQZoQXn0iennk10RsIoY6ikzAqJTc9Xjl9C1/waHom/J86ziAQ==" }, "node_modules/trim-trailing-lines": { "version": "1.1.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz", + "integrity": "sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -17321,7 +21174,8 @@ }, "node_modules/trough": { "version": "1.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", + "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -17329,15 +21183,18 @@ }, "node_modules/tslib": { "version": "2.4.0", - "license": "0BSD" + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "node_modules/tty-browserify": { "version": "0.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz", + "integrity": "sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw==" }, "node_modules/tunnel-agent": { "version": "0.6.0", - "license": "Apache-2.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", "dependencies": { "safe-buffer": "^5.0.1" }, @@ -17345,10 +21202,17 @@ "node": "*" } }, + "node_modules/tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", + "dev": true + }, "node_modules/type-check": { "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", "dev": true, - "license": "MIT", "dependencies": { "prelude-ls": "~1.1.2" }, @@ -17358,15 +21222,17 @@ }, "node_modules/type-detect": { "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", "dev": true, - "license": "MIT", "engines": { "node": ">=4" } }, "node_modules/type-fest": { - "version": "2.12.2", - "license": "(MIT OR CC0-1.0)", + "version": "2.19.0", + "resolved": 
"https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", + "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", "engines": { "node": ">=12.20" }, @@ -17376,7 +21242,8 @@ }, "node_modules/type-is": { "version": "1.6.18", - "license": "MIT", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" @@ -17387,14 +21254,16 @@ }, "node_modules/typedarray-to-buffer": { "version": "3.1.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", "dependencies": { "is-typedarray": "^1.0.0" } }, "node_modules/typescript": { - "version": "4.6.4", - "license": "Apache-2.0", + "version": "4.8.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.4.tgz", + "integrity": "sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ==", "peer": true, "bin": { "tsc": "bin/tsc", @@ -17405,7 +21274,9 @@ } }, "node_modules/ua-parser-js": { - "version": "0.7.31", + "version": "0.7.32", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.32.tgz", + "integrity": "sha512-f9BESNVhzlhEFf2CHMSj40NWOjYPl1YKYbrvIr/hFTDEmLq7SRbWvm7FcdcpCYT95zrOhC7gZSxjdnnTpBcwVw==", "funding": [ { "type": "opencollective", @@ -17416,14 +21287,14 @@ "url": "https://paypal.me/faisalman" } ], - "license": "MIT", "engines": { "node": "*" } }, "node_modules/unbox-primitive": { "version": "1.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", "dependencies": { "call-bind": "^1.0.2", "has-bigints": "^1.0.2", @@ -17436,7 +21307,8 @@ }, "node_modules/unherit": { "version": "1.1.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unherit/-/unherit-1.1.3.tgz", + "integrity": "sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ==", "dependencies": { "inherits": "^2.0.0", "xtend": "^4.0.0" @@ -17448,14 +21320,16 @@ }, "node_modules/unicode-canonical-property-names-ecmascript": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", + "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==", "engines": { "node": ">=4" } }, "node_modules/unicode-match-property-ecmascript": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", + "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", "dependencies": { "unicode-canonical-property-names-ecmascript": "^2.0.0", "unicode-property-aliases-ecmascript": "^2.0.0" @@ -17466,21 +21340,24 @@ }, "node_modules/unicode-match-property-value-ecmascript": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz", + "integrity": 
"sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==", "engines": { "node": ">=4" } }, "node_modules/unicode-property-aliases-ecmascript": { - "version": "2.0.0", - "license": "MIT", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz", + "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==", "engines": { "node": ">=4" } }, "node_modules/unified": { "version": "9.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz", + "integrity": "sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg==", "dependencies": { "bail": "^1.0.0", "extend": "^3.0.0", @@ -17496,7 +21373,8 @@ }, "node_modules/unique-string": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", "dependencies": { "crypto-random-string": "^2.0.0" }, @@ -17506,7 +21384,20 @@ }, "node_modules/unist-builder": { "version": "2.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz", + "integrity": "sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-find-after": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unist-util-find-after/-/unist-util-find-after-3.0.0.tgz", + "integrity": "sha512-ojlBqfsBftYXExNu3+hHLfJQ/X1jYY/9vdm4yZWjIbf0VuWF6CRufci1ZyoD/wV2TYMKxXUoNuoqwy+CkgzAiQ==", + "dependencies": { + "unist-util-is": "^4.0.0" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -17514,7 +21405,8 @@ }, "node_modules/unist-util-generated": { "version": "1.1.6", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.6.tgz", + "integrity": "sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -17522,7 +21414,8 @@ }, "node_modules/unist-util-is": { "version": "4.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", + "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -17530,7 +21423,8 @@ }, "node_modules/unist-util-position": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz", + "integrity": "sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -17538,7 +21432,8 @@ }, "node_modules/unist-util-remove": { "version": "2.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz", + "integrity": "sha512-J8NYPyBm4baYLdCbjmf1bhPu45Cr1MWTm77qd9istEkzWpnN6O9tMsEbB2JhNnBCqGENRqEWomQ+He6au0B27Q==", "dependencies": { "unist-util-is": "^4.0.0" }, @@ -17549,7 +21444,8 @@ }, 
"node_modules/unist-util-remove-position": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz", + "integrity": "sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA==", "dependencies": { "unist-util-visit": "^2.0.0" }, @@ -17560,7 +21456,8 @@ }, "node_modules/unist-util-stringify-position": { "version": "2.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz", + "integrity": "sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==", "dependencies": { "@types/unist": "^2.0.2" }, @@ -17571,7 +21468,8 @@ }, "node_modules/unist-util-visit": { "version": "2.0.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", + "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", "dependencies": { "@types/unist": "^2.0.0", "unist-util-is": "^4.0.0", @@ -17584,7 +21482,8 @@ }, "node_modules/unist-util-visit-parents": { "version": "3.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz", + "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==", "dependencies": { "@types/unist": "^2.0.0", "unist-util-is": "^4.0.0" @@ -17596,25 +21495,58 @@ }, "node_modules/universalify": { "version": "2.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", "engines": { "node": ">= 10.0.0" } }, "node_modules/unpipe": { "version": "1.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", "engines": { "node": ">= 0.8" } }, - "node_modules/unquote": { - "version": "1.1.1", - "license": "MIT" + "node_modules/untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz", + "integrity": "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + } + ], + "dependencies": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + }, + "bin": { + "browserslist-lint": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } }, "node_modules/update-notifier": { "version": "5.1.0", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz", + "integrity": "sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw==", "dependencies": { "boxen": "^5.0.0", "chalk": "^4.1.0", @@ -17640,7 +21572,8 @@ }, 
"node_modules/update-notifier/node_modules/ansi-styles": { "version": "4.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dependencies": { "color-convert": "^2.0.1" }, @@ -17653,7 +21586,8 @@ }, "node_modules/update-notifier/node_modules/boxen": { "version": "5.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz", + "integrity": "sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==", "dependencies": { "ansi-align": "^3.0.0", "camelcase": "^6.2.0", @@ -17673,7 +21607,8 @@ }, "node_modules/update-notifier/node_modules/chalk": { "version": "4.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -17685,9 +21620,15 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/update-notifier/node_modules/ci-info": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==" + }, "node_modules/update-notifier/node_modules/cli-boxes": { "version": "2.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz", + "integrity": "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==", "engines": { "node": ">=6" }, @@ -17697,7 +21638,8 @@ }, "node_modules/update-notifier/node_modules/color-convert": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { "color-name": "~1.1.4" }, @@ -17707,22 +21649,37 @@ }, "node_modules/update-notifier/node_modules/color-name": { "version": "1.1.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/update-notifier/node_modules/emoji-regex": { "version": "8.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/update-notifier/node_modules/has-flag": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "engines": { "node": ">=8" } }, + "node_modules/update-notifier/node_modules/is-ci": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", + "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", + "dependencies": { + "ci-info": "^2.0.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, "node_modules/update-notifier/node_modules/string-width": { "version": "4.2.3", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -17734,7 +21691,8 @@ }, "node_modules/update-notifier/node_modules/supports-color": { "version": "7.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dependencies": { "has-flag": "^4.0.0" }, @@ -17744,7 +21702,8 @@ }, "node_modules/update-notifier/node_modules/type-fest": { "version": "0.20.2", - "license": "(MIT OR CC0-1.0)", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "engines": { "node": ">=10" }, @@ -17754,7 +21713,8 @@ }, "node_modules/update-notifier/node_modules/widest-line": { "version": "3.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", + "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", "dependencies": { "string-width": "^4.0.0" }, @@ -17764,7 +21724,8 @@ }, "node_modules/update-notifier/node_modules/wrap-ansi": { "version": "7.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -17779,14 +21740,16 @@ }, "node_modules/uri-js": { "version": "4.4.1", - "license": "BSD-2-Clause", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "dependencies": { "punycode": "^2.1.0" } }, "node_modules/url": { "version": "0.11.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", + "integrity": "sha512-kbailJa29QrtXnxgq+DdCEGlbTeYM2eJUxsz6vjZavrCYPMIFHMKQmSKYAIuUK2i7hgPm28a8piX5NTUtM/LKQ==", "dependencies": { "punycode": "1.3.2", "querystring": "0.2.0" @@ -17794,7 +21757,8 @@ }, "node_modules/url-loader": { "version": "4.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-4.1.1.tgz", + "integrity": "sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==", "dependencies": { "loader-utils": "^2.0.0", "mime-types": "^2.1.27", @@ -17819,7 +21783,8 @@ }, "node_modules/url-loader/node_modules/schema-utils": { "version": "3.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", "dependencies": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", @@ -17833,9 +21798,20 @@ "url": "https://opencollective.com/webpack" } }, + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "dependencies": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, 
"node_modules/url-parse-lax": { "version": "3.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", + "integrity": "sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ==", "dependencies": { "prepend-http": "^2.0.0" }, @@ -17845,28 +21821,35 @@ }, "node_modules/url-template": { "version": "2.0.8", - "license": "BSD" + "resolved": "https://registry.npmjs.org/url-template/-/url-template-2.0.8.tgz", + "integrity": "sha512-XdVKMF4SJ0nP/O7XIPB0JwAEuT9lDIYnNsK8yGVe43y0AWoKeJNdv3ZNWh7ksJ6KqQFjOO6ox/VEitLnaVNufw==" }, "node_modules/url/node_modules/punycode": { "version": "1.3.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==" }, "node_modules/url/node_modules/querystring": { "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==", + "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", "engines": { "node": ">=0.4.x" } }, "node_modules/use-composed-ref": { "version": "1.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.3.0.tgz", + "integrity": "sha512-GLMG0Jc/jiKov/3Ulid1wbv3r54K9HlMW29IWcDFPEqFkSO2nS0MuefWgMJpeHQ9YJeXDL3ZUF+P3jdXlZX/cQ==", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, "node_modules/use-isomorphic-layout-effect": { "version": "1.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz", + "integrity": "sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA==", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0" }, @@ -17878,7 +21861,8 @@ }, "node_modules/use-latest": { "version": "1.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/use-latest/-/use-latest-1.2.1.tgz", + "integrity": "sha512-xA+AVm/Wlg3e2P/JiItTziwS7FK92LWrDB0p+hgXloIMuVCeJJ8v6f0eeHyPZaJrM+usM1FkFfbNCrJGs8A/zw==", "dependencies": { "use-isomorphic-layout-effect": "^1.1.1" }, @@ -17893,69 +21877,63 @@ }, "node_modules/utif": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/utif/-/utif-2.0.1.tgz", + "integrity": "sha512-Z/S1fNKCicQTf375lIP9G8Sa1H/phcysstNrrSdZKj1f9g58J4NMgb5IgiEZN9/nLMPDwF0W7hdOe9Qq2IYoLg==", "dependencies": { "pako": "^1.0.5" } }, "node_modules/util": { - "version": "0.12.4", - "license": "MIT", + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", "dependencies": { "inherits": "^2.0.3", "is-arguments": "^1.0.4", "is-generator-function": "^1.0.7", "is-typed-array": "^1.1.3", - "safe-buffer": "^5.1.2", "which-typed-array": "^1.1.2" } }, "node_modules/util-deprecate": { "version": "1.0.2", - "license": "MIT" - }, - "node_modules/util.promisify": { - "version": "1.0.1", - "license": "MIT", - "dependencies": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.2", - "has-symbols": "^1.0.1", - "object.getownpropertydescriptors": "^2.1.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } + "resolved": 
"https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "node_modules/utila": { "version": "0.4.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz", + "integrity": "sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==" }, "node_modules/utility-types": { "version": "3.10.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/utility-types/-/utility-types-3.10.0.tgz", + "integrity": "sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg==", "engines": { "node": ">= 4" } }, "node_modules/utils-merge": { "version": "1.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", "engines": { "node": ">= 0.4.0" } }, "node_modules/uuid": { - "version": "7.0.3", - "license": "MIT", + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", "bin": { "uuid": "dist/bin/uuid" } }, "node_modules/v8-to-istanbul": { "version": "8.1.1", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz", + "integrity": "sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w==", "dev": true, - "license": "ISC", "dependencies": { "@types/istanbul-lib-coverage": "^2.0.1", "convert-source-map": "^1.6.0", @@ -17966,27 +21944,45 @@ } }, "node_modules/v8-to-istanbul/node_modules/source-map": { - "version": "0.7.3", + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", "dev": true, - "license": "BSD-3-Clause", "engines": { "node": ">= 8" } }, "node_modules/value-equal": { "version": "1.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", + "integrity": "sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==" }, "node_modules/vary": { "version": "1.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", "engines": { "node": ">= 0.8" } }, + "node_modules/verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", + "dev": true, + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, "node_modules/vfile": { "version": "4.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.2.1.tgz", + "integrity": "sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==", "dependencies": { "@types/unist": "^2.0.0", "is-buffer": "^2.0.0", @@ -18000,7 +21996,8 @@ }, "node_modules/vfile-location": { "version": "3.2.0", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/vfile-location/-/vfile-location-3.2.0.tgz", + "integrity": "sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA==", "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -18008,7 +22005,8 @@ }, "node_modules/vfile-message": { "version": "2.0.4", - "license": "MIT", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz", + "integrity": "sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==", "dependencies": { "@types/unist": "^2.0.0", "unist-util-stringify-position": "^2.0.0" @@ -18020,20 +22018,24 @@ }, "node_modules/vm-browserify": { "version": "1.1.2", - "license": "MIT" + "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz", + "integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==" }, "node_modules/w3c-hr-time": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz", + "integrity": "sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==", + "deprecated": "Use your platform's native performance.now() and performance.timeOrigin.", "dev": true, - "license": "MIT", "dependencies": { "browser-process-hrtime": "^1.0.0" } }, "node_modules/w3c-xmlserializer": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz", + "integrity": "sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA==", "dev": true, - "license": "MIT", "dependencies": { "xml-name-validator": "^3.0.0" }, @@ -18043,7 +22045,8 @@ }, "node_modules/wait-on": { "version": "6.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/wait-on/-/wait-on-6.0.1.tgz", + "integrity": "sha512-zht+KASY3usTY5u2LgaNqn/Cd8MukxLGjdcZxT2ns5QzDmTFc4XoWBgC+C/na+sMRZTuVygQoMYwdcVjHnYIVw==", "dependencies": { "axios": "^0.25.0", "joi": "^17.6.0", @@ -18058,17 +22061,27 @@ "node": ">=10.0.0" } }, + "node_modules/wait-on/node_modules/axios": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", + "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", + "dependencies": { + "follow-redirects": "^1.14.7" + } + }, "node_modules/walker": { "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", "dev": true, - "license": "Apache-2.0", "dependencies": { "makeerror": "1.0.12" } }, "node_modules/watchpack": { - "version": "2.3.1", - "license": "MIT", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz", + "integrity": "sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==", "dependencies": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" @@ -18079,14 +22092,16 @@ }, "node_modules/wbuf": { "version": "1.7.3", - "license": "MIT", + "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", + "integrity": "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==", "dependencies": { "minimalistic-assert": "^1.0.0" } }, "node_modules/web-namespaces": { "version": "1.1.4", - "license": "MIT", + "resolved": 
"https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz", + "integrity": "sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -18094,39 +22109,41 @@ }, "node_modules/webidl-conversions": { "version": "6.1.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-6.1.0.tgz", + "integrity": "sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==", "dev": true, - "license": "BSD-2-Clause", "engines": { "node": ">=10.4" } }, "node_modules/webpack": { - "version": "5.72.0", - "license": "MIT", + "version": "5.74.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.74.0.tgz", + "integrity": "sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA==", "dependencies": { "@types/eslint-scope": "^3.7.3", "@types/estree": "^0.0.51", "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/wasm-edit": "1.11.1", "@webassemblyjs/wasm-parser": "1.11.1", - "acorn": "^8.4.1", + "acorn": "^8.7.1", "acorn-import-assertions": "^1.7.6", "browserslist": "^4.14.5", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.9.2", + "enhanced-resolve": "^5.10.0", "es-module-lexer": "^0.9.0", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.9", - "json-parse-better-errors": "^1.0.2", + "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", "schema-utils": "^3.1.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.1.3", - "watchpack": "^2.3.1", + "watchpack": "^2.4.0", "webpack-sources": "^3.2.3" }, "bin": { @@ -18146,8 +22163,9 @@ } }, "node_modules/webpack-bundle-analyzer": { - "version": "4.5.0", - "license": "MIT", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.6.1.tgz", + "integrity": "sha512-oKz9Oz9j3rUciLNfpGFjOb49/jEpXNmWdVH8Ls//zNcnLlQdTGXQQMsBbb/gR7Zl8WNLxVCq+0Hqbx3zv6twBw==", "dependencies": { "acorn": "^8.0.4", "acorn-walk": "^8.0.0", @@ -18168,14 +22186,16 @@ }, "node_modules/webpack-bundle-analyzer/node_modules/acorn-walk": { "version": "8.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", "engines": { "node": ">=0.4.0" } }, "node_modules/webpack-bundle-analyzer/node_modules/ansi-styles": { "version": "4.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dependencies": { "color-convert": "^2.0.1" }, @@ -18188,7 +22208,8 @@ }, "node_modules/webpack-bundle-analyzer/node_modules/chalk": { "version": "4.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -18202,7 +22223,8 @@ }, "node_modules/webpack-bundle-analyzer/node_modules/color-convert": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": 
"sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { "color-name": "~1.1.4" }, @@ -18212,25 +22234,29 @@ }, "node_modules/webpack-bundle-analyzer/node_modules/color-name": { "version": "1.1.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/webpack-bundle-analyzer/node_modules/commander": { "version": "7.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", "engines": { "node": ">= 10" } }, "node_modules/webpack-bundle-analyzer/node_modules/has-flag": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "engines": { "node": ">=8" } }, "node_modules/webpack-bundle-analyzer/node_modules/supports-color": { "version": "7.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dependencies": { "has-flag": "^4.0.0" }, @@ -18239,11 +22265,12 @@ } }, "node_modules/webpack-dev-middleware": { - "version": "5.3.1", - "license": "MIT", + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz", + "integrity": "sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==", "dependencies": { "colorette": "^2.0.10", - "memfs": "^3.4.1", + "memfs": "^3.4.3", "mime-types": "^2.1.31", "range-parser": "^1.2.1", "schema-utils": "^4.0.0" @@ -18261,7 +22288,8 @@ }, "node_modules/webpack-dev-middleware/node_modules/ajv": { "version": "8.11.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -18275,7 +22303,8 @@ }, "node_modules/webpack-dev-middleware/node_modules/ajv-keywords": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "dependencies": { "fast-deep-equal": "^3.1.3" }, @@ -18283,20 +22312,18 @@ "ajv": "^8.8.2" } }, - "node_modules/webpack-dev-middleware/node_modules/colorette": { - "version": "2.0.16", - "license": "MIT" - }, "node_modules/webpack-dev-middleware/node_modules/range-parser": { "version": "1.2.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", "engines": { "node": ">= 0.6" } }, "node_modules/webpack-dev-middleware/node_modules/schema-utils": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", 
"dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", @@ -18312,13 +22339,15 @@ } }, "node_modules/webpack-dev-server": { - "version": "4.8.1", - "license": "MIT", + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.11.1.tgz", + "integrity": "sha512-lILVz9tAUy1zGFwieuaQtYiadImb5M3d+H+L1zDYalYoDl0cksAB1UNyuE5MMWJrG6zR1tXkCP2fitl7yoUJiw==", "dependencies": { "@types/bonjour": "^3.5.9", "@types/connect-history-api-fallback": "^1.3.5", "@types/express": "^4.17.13", "@types/serve-index": "^1.9.1", + "@types/serve-static": "^1.13.10", "@types/sockjs": "^0.3.33", "@types/ws": "^8.5.1", "ansi-html-community": "^0.0.8", @@ -18326,7 +22355,7 @@ "chokidar": "^3.5.3", "colorette": "^2.0.10", "compression": "^1.7.4", - "connect-history-api-fallback": "^1.6.0", + "connect-history-api-fallback": "^2.0.0", "default-gateway": "^6.0.3", "express": "^4.17.3", "graceful-fs": "^4.2.6", @@ -18335,12 +22364,11 @@ "ipaddr.js": "^2.0.1", "open": "^8.0.9", "p-retry": "^4.5.0", - "portfinder": "^1.0.28", "rimraf": "^3.0.2", "schema-utils": "^4.0.0", - "selfsigned": "^2.0.1", + "selfsigned": "^2.1.1", "serve-index": "^1.9.1", - "sockjs": "^0.3.21", + "sockjs": "^0.3.24", "spdy": "^4.0.2", "webpack-dev-middleware": "^5.3.1", "ws": "^8.4.2" @@ -18351,6 +22379,10 @@ "engines": { "node": ">= 12.13.0" }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, "peerDependencies": { "webpack": "^4.37.0 || ^5.0.0" }, @@ -18362,7 +22394,8 @@ }, "node_modules/webpack-dev-server/node_modules/ajv": { "version": "8.11.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -18376,7 +22409,8 @@ }, "node_modules/webpack-dev-server/node_modules/ajv-keywords": { "version": "5.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "dependencies": { "fast-deep-equal": "^3.1.3" }, @@ -18384,13 +22418,10 @@ "ajv": "^8.8.2" } }, - "node_modules/webpack-dev-server/node_modules/colorette": { - "version": "2.0.16", - "license": "MIT" - }, "node_modules/webpack-dev-server/node_modules/schema-utils": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "dependencies": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", @@ -18406,8 +22437,9 @@ } }, "node_modules/webpack-dev-server/node_modules/ws": { - "version": "8.6.0", - "license": "MIT", + "version": "8.9.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.9.0.tgz", + "integrity": "sha512-Ja7nszREasGaYUYCI2k4lCKIRTt+y7XuqVoHR44YpI49TtryyqbqvDMn5eqfW7e6HzTukDRIsXqzVHScqRcafg==", "engines": { "node": ">=10.0.0" }, @@ -18426,7 +22458,8 @@ }, "node_modules/webpack-merge": { "version": "5.8.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", + "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", "dependencies": { "clone-deep": "^4.0.1", "wildcard": "^2.0.0" @@ -18436,16 +22469,17 @@ } }, 
"node_modules/webpack-sources": { - "version": "1.4.3", - "license": "MIT", - "dependencies": { - "source-list-map": "^2.0.0", - "source-map": "~0.6.1" + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", + "engines": { + "node": ">=10.13.0" } }, "node_modules/webpack/node_modules/schema-utils": { "version": "3.1.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", "dependencies": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", @@ -18459,16 +22493,10 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/webpack/node_modules/webpack-sources": { - "version": "3.2.3", - "license": "MIT", - "engines": { - "node": ">=10.13.0" - } - }, "node_modules/webpackbar": { "version": "5.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.2.tgz", + "integrity": "sha512-BmFJo7veBDgQzfWXl/wwYXr/VFus0614qZ8i9znqcl9fnEdiVkdbi0TedLQ6xAK92HZHDJ0QmyQ0fmuZPAgCYQ==", "dependencies": { "chalk": "^4.1.0", "consola": "^2.15.3", @@ -18484,7 +22512,8 @@ }, "node_modules/webpackbar/node_modules/ansi-styles": { "version": "4.3.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dependencies": { "color-convert": "^2.0.1" }, @@ -18497,7 +22526,8 @@ }, "node_modules/webpackbar/node_modules/chalk": { "version": "4.1.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -18511,7 +22541,8 @@ }, "node_modules/webpackbar/node_modules/color-convert": { "version": "2.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { "color-name": "~1.1.4" }, @@ -18521,18 +22552,21 @@ }, "node_modules/webpackbar/node_modules/color-name": { "version": "1.1.4", - "license": "MIT" + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "node_modules/webpackbar/node_modules/has-flag": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "engines": { "node": ">=8" } }, "node_modules/webpackbar/node_modules/supports-color": { "version": "7.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dependencies": { "has-flag": "^4.0.0" }, @@ -18542,7 +22576,8 @@ }, "node_modules/websocket-driver": { "version": "0.7.4", - "license": "Apache-2.0", + "resolved": 
"https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", + "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", "dependencies": { "http-parser-js": ">=0.5.1", "safe-buffer": ">=5.1.0", @@ -18554,28 +22589,32 @@ }, "node_modules/websocket-extensions": { "version": "0.1.4", - "license": "Apache-2.0", + "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", + "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==", "engines": { "node": ">=0.8.0" } }, "node_modules/whatwg-encoding": { "version": "1.0.5", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz", + "integrity": "sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==", "dev": true, - "license": "MIT", "dependencies": { "iconv-lite": "0.4.24" } }, "node_modules/whatwg-mimetype": { "version": "2.3.0", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz", + "integrity": "sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==", + "dev": true }, "node_modules/whatwg-url": { "version": "8.7.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.7.0.tgz", + "integrity": "sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg==", "dev": true, - "license": "MIT", "dependencies": { "lodash": "^4.7.0", "tr46": "^2.1.0", @@ -18587,7 +22626,8 @@ }, "node_modules/which": { "version": "2.0.2", - "license": "ISC", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "dependencies": { "isexe": "^2.0.0" }, @@ -18600,7 +22640,8 @@ }, "node_modules/which-boxed-primitive": { "version": "1.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", "dependencies": { "is-bigint": "^1.0.1", "is-boolean-object": "^1.1.0", @@ -18613,15 +22654,16 @@ } }, "node_modules/which-typed-array": { - "version": "1.1.7", - "license": "MIT", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.8.tgz", + "integrity": "sha512-Jn4e5PItbcAHyLoRDwvPj1ypu27DJbtdYXUa5zsinrUx77Uvfb0cXwwnGMTn7cjUfhhqgVQnVJCwF+7cgU7tpw==", "dependencies": { "available-typed-arrays": "^1.0.5", "call-bind": "^1.0.2", - "es-abstract": "^1.18.5", - "foreach": "^2.0.5", + "es-abstract": "^1.20.0", + "for-each": "^0.3.3", "has-tostringtag": "^1.0.0", - "is-typed-array": "^1.1.7" + "is-typed-array": "^1.1.9" }, "engines": { "node": ">= 0.4" @@ -18630,32 +22672,10 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/wide-align": { - "version": "1.1.5", - "license": "ISC", - "dependencies": { - "string-width": "^1.0.2 || 2 || 3 || 4" - } - }, - "node_modules/wide-align/node_modules/emoji-regex": { - "version": "8.0.0", - "license": "MIT" - }, - "node_modules/wide-align/node_modules/string-width": { - "version": "4.2.3", - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/widest-line": { 
"version": "4.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz", + "integrity": "sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==", "dependencies": { "string-width": "^5.0.1" }, @@ -18668,19 +22688,22 @@ }, "node_modules/wildcard": { "version": "2.0.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", + "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==" }, "node_modules/word-wrap": { "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/wrap-ansi": { "version": "8.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.0.1.tgz", + "integrity": "sha512-QFF+ufAqhoYHvoHdajT/Po7KoXVBPXS2bgjIam5isfWJPfIOnQZ50JtUiVvCv/sjgacf3yRrt2ZKUZ/V4itN4g==", "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", @@ -18695,7 +22718,8 @@ }, "node_modules/wrap-ansi/node_modules/ansi-regex": { "version": "6.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", "engines": { "node": ">=12" }, @@ -18704,8 +22728,9 @@ } }, "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "6.1.0", - "license": "MIT", + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", "engines": { "node": ">=12" }, @@ -18715,7 +22740,8 @@ }, "node_modules/wrap-ansi/node_modules/strip-ansi": { "version": "7.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", + "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -18728,11 +22754,13 @@ }, "node_modules/wrappy": { "version": "1.0.2", - "license": "ISC" + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "node_modules/write-file-atomic": { "version": "3.0.3", - "license": "ISC", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", "dependencies": { "imurmurhash": "^0.1.4", "is-typedarray": "^1.0.0", @@ -18741,8 +22769,9 @@ } }, "node_modules/ws": { - "version": "7.5.7", - "license": "MIT", + "version": "7.5.9", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz", + "integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==", "engines": { "node": ">=8.3.0" }, @@ -18761,14 +22790,16 @@ }, "node_modules/xdg-basedir": { "version": "4.0.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", "engines": { "node": ">=8" } }, 
"node_modules/xhr": { "version": "2.6.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/xhr/-/xhr-2.6.0.tgz", + "integrity": "sha512-/eCGLb5rxjx5e3mF1A7s+pLlR6CGyqWN91fv1JgER5mVWg1MZmlhBvy9kjcsOdRk8RrIujotWyJamfyrp+WIcA==", "dependencies": { "global": "~4.4.0", "is-function": "^1.0.1", @@ -18778,7 +22809,8 @@ }, "node_modules/xml-js": { "version": "1.6.11", - "license": "MIT", + "resolved": "https://registry.npmjs.org/xml-js/-/xml-js-1.6.11.tgz", + "integrity": "sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==", "dependencies": { "sax": "^1.2.4" }, @@ -18788,16 +22820,19 @@ }, "node_modules/xml-name-validator": { "version": "3.0.0", - "dev": true, - "license": "Apache-2.0" + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-3.0.0.tgz", + "integrity": "sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==", + "dev": true }, "node_modules/xml-parse-from-string": { "version": "1.0.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz", + "integrity": "sha512-ErcKwJTF54uRzzNMXq2X5sMIy88zJvfN2DmdoQvy7PAFJ+tPRU6ydWuOKNMyfmOjdyBQTFREi60s0Y0SyI0G0g==" }, "node_modules/xml2js": { "version": "0.4.23", - "license": "MIT", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", "dependencies": { "sax": ">=0.6.0", "xmlbuilder": "~11.0.0" @@ -18808,49 +22843,57 @@ }, "node_modules/xmlbuilder": { "version": "11.0.1", - "license": "MIT", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", "engines": { "node": ">=4.0" } }, "node_modules/xmlchars": { "version": "2.2.0", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", + "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", + "dev": true }, "node_modules/xtend": { "version": "4.0.2", - "license": "MIT", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", "engines": { "node": ">=0.4" } }, "node_modules/y18n": { "version": "5.0.8", - "license": "ISC", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", "engines": { "node": ">=10" } }, "node_modules/yallist": { "version": "4.0.0", - "license": "ISC" + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yaml": { "version": "1.10.2", - "license": "ISC", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", "engines": { "node": ">= 6" } }, "node_modules/yaml-ast-parser": { "version": "0.0.43", - "license": "Apache-2.0" + "resolved": "https://registry.npmjs.org/yaml-ast-parser/-/yaml-ast-parser-0.0.43.tgz", + "integrity": 
"sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A==" }, "node_modules/yargs": { "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", "dev": true, - "license": "MIT", "dependencies": { "cliui": "^7.0.2", "escalade": "^3.1.1", @@ -18866,21 +22909,24 @@ }, "node_modules/yargs-parser": { "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", "dev": true, - "license": "ISC", "engines": { "node": ">=10" } }, "node_modules/yargs/node_modules/emoji-regex": { "version": "8.0.0", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "node_modules/yargs/node_modules/string-width": { "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, - "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -18890,9 +22936,20 @@ "node": ">=8" } }, + "node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dev": true, + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, "node_modules/yocto-queue": { "version": "0.1.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "engines": { "node": ">=10" }, @@ -18902,7 +22959,8 @@ }, "node_modules/zwitch": { "version": "1.0.5", - "license": "MIT", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz", + "integrity": "sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -18910,140 +22968,190 @@ } }, "dependencies": { + "@adobe/css-tools": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.0.1.tgz", + "integrity": "sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g==", + "dev": true + }, "@algolia/autocomplete-core": { - "version": "1.5.2", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.7.1.tgz", + "integrity": "sha512-eiZw+fxMzNQn01S8dA/hcCpoWCOCwcIIEUtHHdzN5TGB3IpzLbuhqFeTfh2OUhhgkE8Uo17+wH+QJ/wYyQmmzg==", "requires": { - "@algolia/autocomplete-shared": "1.5.2" + "@algolia/autocomplete-shared": "1.7.1" } }, "@algolia/autocomplete-preset-algolia": { - "version": "1.5.2", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.7.1.tgz", + "integrity": "sha512-pJwmIxeJCymU1M6cGujnaIYcY3QPOVYZOXhFkWVM7IxKzy272BwCvMFMyc5NpG/QmiObBxjo7myd060OeTNJXg==", "requires": { - "@algolia/autocomplete-shared": 
"1.5.2" + "@algolia/autocomplete-shared": "1.7.1" } }, "@algolia/autocomplete-shared": { - "version": "1.5.2" + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.7.1.tgz", + "integrity": "sha512-eTmGVqY3GeyBTT8IWiB2K5EuURAqhnumfktAEoHxfDY2o7vg2rSnO16ZtIG0fMgt3py28Vwgq42/bVEuaQV7pg==" }, "@algolia/cache-browser-local-storage": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.14.2.tgz", + "integrity": "sha512-FRweBkK/ywO+GKYfAWbrepewQsPTIEirhi1BdykX9mxvBPtGNKccYAxvGdDCumU1jL4r3cayio4psfzKMejBlA==", "requires": { - "@algolia/cache-common": "4.13.0" + "@algolia/cache-common": "4.14.2" } }, "@algolia/cache-common": { - "version": "4.13.0" + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.14.2.tgz", + "integrity": "sha512-SbvAlG9VqNanCErr44q6lEKD2qoK4XtFNx9Qn8FK26ePCI8I9yU7pYB+eM/cZdS9SzQCRJBbHUumVr4bsQ4uxg==" }, "@algolia/cache-in-memory": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.14.2.tgz", + "integrity": "sha512-HrOukWoop9XB/VFojPv1R5SVXowgI56T9pmezd/djh2JnVN/vXswhXV51RKy4nCpqxyHt/aGFSq2qkDvj6KiuQ==", "requires": { - "@algolia/cache-common": "4.13.0" + "@algolia/cache-common": "4.14.2" } }, "@algolia/client-account": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.14.2.tgz", + "integrity": "sha512-WHtriQqGyibbb/Rx71YY43T0cXqyelEU0lB2QMBRXvD2X0iyeGl4qMxocgEIcbHyK7uqE7hKgjT8aBrHqhgc1w==", "requires": { - "@algolia/client-common": "4.13.0", - "@algolia/client-search": "4.13.0", - "@algolia/transporter": "4.13.0" + "@algolia/client-common": "4.14.2", + "@algolia/client-search": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "@algolia/client-analytics": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.14.2.tgz", + "integrity": "sha512-yBvBv2mw+HX5a+aeR0dkvUbFZsiC4FKSnfqk9rrfX+QrlNOKEhCG0tJzjiOggRW4EcNqRmaTULIYvIzQVL2KYQ==", "requires": { - "@algolia/client-common": "4.13.0", - "@algolia/client-search": "4.13.0", - "@algolia/requester-common": "4.13.0", - "@algolia/transporter": "4.13.0" + "@algolia/client-common": "4.14.2", + "@algolia/client-search": "4.14.2", + "@algolia/requester-common": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "@algolia/client-common": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.14.2.tgz", + "integrity": "sha512-43o4fslNLcktgtDMVaT5XwlzsDPzlqvqesRi4MjQz2x4/Sxm7zYg5LRYFol1BIhG6EwxKvSUq8HcC/KxJu3J0Q==", "requires": { - "@algolia/requester-common": "4.13.0", - "@algolia/transporter": "4.13.0" + "@algolia/requester-common": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "@algolia/client-personalization": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.14.2.tgz", + "integrity": "sha512-ACCoLi0cL8CBZ1W/2juehSltrw2iqsQBnfiu/Rbl9W2yE6o2ZUb97+sqN/jBqYNQBS+o0ekTMKNkQjHHAcEXNw==", "requires": { - "@algolia/client-common": "4.13.0", - "@algolia/requester-common": "4.13.0", - "@algolia/transporter": "4.13.0" + "@algolia/client-common": "4.14.2", + "@algolia/requester-common": "4.14.2", + 
"@algolia/transporter": "4.14.2" } }, "@algolia/client-search": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.14.2.tgz", + "integrity": "sha512-L5zScdOmcZ6NGiVbLKTvP02UbxZ0njd5Vq9nJAmPFtjffUSOGEp11BmD2oMJ5QvARgx2XbX4KzTTNS5ECYIMWw==", "requires": { - "@algolia/client-common": "4.13.0", - "@algolia/requester-common": "4.13.0", - "@algolia/transporter": "4.13.0" + "@algolia/client-common": "4.14.2", + "@algolia/requester-common": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "@algolia/events": { - "version": "4.0.1" + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@algolia/events/-/events-4.0.1.tgz", + "integrity": "sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==" }, "@algolia/logger-common": { - "version": "4.13.0" + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.14.2.tgz", + "integrity": "sha512-/JGlYvdV++IcMHBnVFsqEisTiOeEr6cUJtpjz8zc0A9c31JrtLm318Njc72p14Pnkw3A/5lHHh+QxpJ6WFTmsA==" }, "@algolia/logger-console": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.14.2.tgz", + "integrity": "sha512-8S2PlpdshbkwlLCSAB5f8c91xyc84VM9Ar9EdfE9UmX+NrKNYnWR1maXXVDQQoto07G1Ol/tYFnFVhUZq0xV/g==", "requires": { - "@algolia/logger-common": "4.13.0" + "@algolia/logger-common": "4.14.2" } }, "@algolia/requester-browser-xhr": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.14.2.tgz", + "integrity": "sha512-CEh//xYz/WfxHFh7pcMjQNWgpl4wFB85lUMRyVwaDPibNzQRVcV33YS+63fShFWc2+42YEipFGH2iPzlpszmDw==", "requires": { - "@algolia/requester-common": "4.13.0" + "@algolia/requester-common": "4.14.2" } }, "@algolia/requester-common": { - "version": "4.13.0" + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.14.2.tgz", + "integrity": "sha512-73YQsBOKa5fvVV3My7iZHu1sUqmjjfs9TteFWwPwDmnad7T0VTCopttcsM3OjLxZFtBnX61Xxl2T2gmG2O4ehg==" }, "@algolia/requester-node-http": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.14.2.tgz", + "integrity": "sha512-oDbb02kd1o5GTEld4pETlPZLY0e+gOSWjWMJHWTgDXbv9rm/o2cF7japO6Vj1ENnrqWvLBmW1OzV9g6FUFhFXg==", "requires": { - "@algolia/requester-common": "4.13.0" + "@algolia/requester-common": "4.14.2" } }, "@algolia/transporter": { - "version": "4.13.0", + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.14.2.tgz", + "integrity": "sha512-t89dfQb2T9MFQHidjHcfhh6iGMNwvuKUvojAj+JsrHAGbuSy7yE4BylhLX6R0Q1xYRoC4Vvv+O5qIw/LdnQfsQ==", "requires": { - "@algolia/cache-common": "4.13.0", - "@algolia/logger-common": "4.13.0", - "@algolia/requester-common": "4.13.0" + "@algolia/cache-common": "4.14.2", + "@algolia/logger-common": "4.14.2", + "@algolia/requester-common": "4.14.2" } }, "@ampproject/remapping": { "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", + "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", "requires": { "@jridgewell/gen-mapping": "^0.1.0", "@jridgewell/trace-mapping": "^0.3.9" } }, "@babel/code-frame": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": 
"https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", + "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", "requires": { - "@babel/highlight": "^7.16.7" + "@babel/highlight": "^7.18.6" } }, "@babel/compat-data": { - "version": "7.17.10" + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.19.4.tgz", + "integrity": "sha512-CHIGpJcUQ5lU9KrPHTjBMhVwQG6CQjxfg36fGXl3qk/Gik1WwWachaXFuo0uCWJT/mStOKtcbFJCaVLihC1CMw==" }, "@babel/core": { - "version": "7.17.10", + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.19.3.tgz", + "integrity": "sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==", "requires": { "@ampproject/remapping": "^2.1.0", - "@babel/code-frame": "^7.16.7", - "@babel/generator": "^7.17.10", - "@babel/helper-compilation-targets": "^7.17.10", - "@babel/helper-module-transforms": "^7.17.7", - "@babel/helpers": "^7.17.9", - "@babel/parser": "^7.17.10", - "@babel/template": "^7.16.7", - "@babel/traverse": "^7.17.10", - "@babel/types": "^7.17.10", + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.19.3", + "@babel/helper-compilation-targets": "^7.19.3", + "@babel/helper-module-transforms": "^7.19.0", + "@babel/helpers": "^7.19.0", + "@babel/parser": "^7.19.3", + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.3", + "@babel/types": "^7.19.3", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -19052,71 +23160,99 @@ }, "dependencies": { "semver": { - "version": "6.3.0" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "@babel/generator": { - "version": "7.17.10", + "version": "7.19.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.19.5.tgz", + "integrity": "sha512-DxbNz9Lz4aMZ99qPpO1raTbcrI1ZeYh+9NR9qhfkQIbFtVEqotHojEBxHzmxhVONkGt6VyrqVQcgpefMy9pqcg==", "requires": { - "@babel/types": "^7.17.10", - "@jridgewell/gen-mapping": "^0.1.0", + "@babel/types": "^7.19.4", + "@jridgewell/gen-mapping": "^0.3.2", "jsesc": "^2.5.1" + }, + "dependencies": { + "@jridgewell/gen-mapping": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", + "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", + "requires": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + } + } } }, "@babel/helper-annotate-as-pure": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz", + "integrity": "sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==", "requires": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" } }, "@babel/helper-builder-binary-assignment-operator-visitor": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz", + "integrity": "sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw==", "requires": { - 
"@babel/helper-explode-assignable-expression": "^7.16.7", - "@babel/types": "^7.16.7" + "@babel/helper-explode-assignable-expression": "^7.18.6", + "@babel/types": "^7.18.9" } }, "@babel/helper-compilation-targets": { - "version": "7.17.10", + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz", + "integrity": "sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==", "requires": { - "@babel/compat-data": "^7.17.10", - "@babel/helper-validator-option": "^7.16.7", - "browserslist": "^4.20.2", + "@babel/compat-data": "^7.19.3", + "@babel/helper-validator-option": "^7.18.6", + "browserslist": "^4.21.3", "semver": "^6.3.0" }, "dependencies": { "semver": { - "version": "6.3.0" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "@babel/helper-create-class-features-plugin": { - "version": "7.17.9", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.19.0.tgz", + "integrity": "sha512-NRz8DwF4jT3UfrmUoZjd0Uph9HQnP30t7Ash+weACcyNkiYTywpIjDBgReJMKgr+n86sn2nPVVmJ28Dm053Kqw==", "requires": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-environment-visitor": "^7.16.7", - "@babel/helper-function-name": "^7.17.9", - "@babel/helper-member-expression-to-functions": "^7.17.7", - "@babel/helper-optimise-call-expression": "^7.16.7", - "@babel/helper-replace-supers": "^7.16.7", - "@babel/helper-split-export-declaration": "^7.16.7" + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-function-name": "^7.19.0", + "@babel/helper-member-expression-to-functions": "^7.18.9", + "@babel/helper-optimise-call-expression": "^7.18.6", + "@babel/helper-replace-supers": "^7.18.9", + "@babel/helper-split-export-declaration": "^7.18.6" } }, "@babel/helper-create-regexp-features-plugin": { - "version": "7.17.0", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.19.0.tgz", + "integrity": "sha512-htnV+mHX32DF81amCDrwIDr8nrp1PTm+3wfBN9/v8QJOLEioOCOG7qNyq0nHeFiWbT3Eb7gsPwEmV64UCQ1jzw==", "requires": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "regexpu-core": "^5.0.1" + "@babel/helper-annotate-as-pure": "^7.18.6", + "regexpu-core": "^5.1.0" } }, "@babel/helper-define-polyfill-provider": { - "version": "0.3.1", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz", + "integrity": "sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww==", "requires": { - "@babel/helper-compilation-targets": "^7.13.0", - "@babel/helper-module-imports": "^7.12.13", - "@babel/helper-plugin-utils": "^7.13.0", - "@babel/traverse": "^7.13.0", + "@babel/helper-compilation-targets": "^7.17.7", + "@babel/helper-plugin-utils": "^7.16.7", "debug": "^4.1.1", "lodash.debounce": "^4.0.8", "resolve": "^1.14.2", @@ -19124,274 +23260,358 @@ }, "dependencies": { "semver": { - "version": "6.3.0" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "@babel/helper-environment-visitor": { - "version": "7.16.7", - "requires": { - "@babel/types": "^7.16.7" - } + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", + "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==" }, "@babel/helper-explode-assignable-expression": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz", + "integrity": "sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==", "requires": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" } }, "@babel/helper-function-name": { - "version": "7.17.9", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz", + "integrity": "sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==", "requires": { - "@babel/template": "^7.16.7", - "@babel/types": "^7.17.0" + "@babel/template": "^7.18.10", + "@babel/types": "^7.19.0" } }, "@babel/helper-hoist-variables": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", + "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", "requires": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" } }, "@babel/helper-member-expression-to-functions": { - "version": "7.17.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.9.tgz", + "integrity": "sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg==", "requires": { - "@babel/types": "^7.17.0" + "@babel/types": "^7.18.9" } }, "@babel/helper-module-imports": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", + "integrity": "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", "requires": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" } }, "@babel/helper-module-transforms": { - "version": "7.17.7", - "requires": { - "@babel/helper-environment-visitor": "^7.16.7", - "@babel/helper-module-imports": "^7.16.7", - "@babel/helper-simple-access": "^7.17.7", - "@babel/helper-split-export-declaration": "^7.16.7", - "@babel/helper-validator-identifier": "^7.16.7", - "@babel/template": "^7.16.7", - "@babel/traverse": "^7.17.3", - "@babel/types": "^7.17.0" + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.19.0.tgz", + "integrity": "sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==", + "requires": { + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-simple-access": "^7.18.6", + "@babel/helper-split-export-declaration": "^7.18.6", + "@babel/helper-validator-identifier": "^7.18.6", + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.0", + "@babel/types": "^7.19.0" } }, 
"@babel/helper-optimise-call-expression": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz", + "integrity": "sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==", "requires": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" } }, "@babel/helper-plugin-utils": { - "version": "7.16.7" + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.19.0.tgz", + "integrity": "sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw==" }, "@babel/helper-remap-async-to-generator": { - "version": "7.16.8", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz", + "integrity": "sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA==", "requires": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-wrap-function": "^7.16.8", - "@babel/types": "^7.16.8" + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-wrap-function": "^7.18.9", + "@babel/types": "^7.18.9" } }, "@babel/helper-replace-supers": { - "version": "7.16.7", + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.19.1.tgz", + "integrity": "sha512-T7ahH7wV0Hfs46SFh5Jz3s0B6+o8g3c+7TMxu7xKfmHikg7EAZ3I2Qk9LFhjxXq8sL7UkP5JflezNwoZa8WvWw==", "requires": { - "@babel/helper-environment-visitor": "^7.16.7", - "@babel/helper-member-expression-to-functions": "^7.16.7", - "@babel/helper-optimise-call-expression": "^7.16.7", - "@babel/traverse": "^7.16.7", - "@babel/types": "^7.16.7" + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-member-expression-to-functions": "^7.18.9", + "@babel/helper-optimise-call-expression": "^7.18.6", + "@babel/traverse": "^7.19.1", + "@babel/types": "^7.19.0" } }, "@babel/helper-simple-access": { - "version": "7.17.7", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.19.4.tgz", + "integrity": "sha512-f9Xq6WqBFqaDfbCzn2w85hwklswz5qsKlh7f08w4Y9yhJHpnNC0QemtSkK5YyOY8kPGvyiwdzZksGUhnGdaUIg==", "requires": { - "@babel/types": "^7.17.0" + "@babel/types": "^7.19.4" } }, "@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.16.0", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.9.tgz", + "integrity": "sha512-imytd2gHi3cJPsybLRbmFrF7u5BIEuI2cNheyKi3/iOBC63kNn3q8Crn2xVuESli0aM4KYsyEqKyS7lFL8YVtw==", "requires": { - "@babel/types": "^7.16.0" + "@babel/types": "^7.18.9" } }, "@babel/helper-split-export-declaration": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", + "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", "requires": { - "@babel/types": "^7.16.7" + "@babel/types": "^7.18.6" } }, + "@babel/helper-string-parser": { + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz", + "integrity": 
"sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==" + }, "@babel/helper-validator-identifier": { - "version": "7.16.7" + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", + "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==" }, "@babel/helper-validator-option": { - "version": "7.16.7" + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz", + "integrity": "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==" }, "@babel/helper-wrap-function": { - "version": "7.16.8", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.19.0.tgz", + "integrity": "sha512-txX8aN8CZyYGTwcLhlk87KRqncAzhh5TpQamZUa0/u3an36NtDpUP6bQgBCBcLeBs09R/OwQu3OjK0k/HwfNDg==", "requires": { - "@babel/helper-function-name": "^7.16.7", - "@babel/template": "^7.16.7", - "@babel/traverse": "^7.16.8", - "@babel/types": "^7.16.8" + "@babel/helper-function-name": "^7.19.0", + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.0", + "@babel/types": "^7.19.0" } }, "@babel/helpers": { - "version": "7.17.9", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.19.4.tgz", + "integrity": "sha512-G+z3aOx2nfDHwX/kyVii5fJq+bgscg89/dJNWpYeKeBv3v9xX8EIabmx1k6u9LS04H7nROFVRVK+e3k0VHp+sw==", "requires": { - "@babel/template": "^7.16.7", - "@babel/traverse": "^7.17.9", - "@babel/types": "^7.17.0" + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.4", + "@babel/types": "^7.19.4" } }, "@babel/highlight": { - "version": "7.17.9", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", + "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", "requires": { - "@babel/helper-validator-identifier": "^7.16.7", + "@babel/helper-validator-identifier": "^7.18.6", "chalk": "^2.0.0", "js-tokens": "^4.0.0" } }, "@babel/parser": { - "version": "7.17.10" + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.19.4.tgz", + "integrity": "sha512-qpVT7gtuOLjWeDTKLkJ6sryqLliBaFpAtGeqw5cs5giLldvh+Ch0plqnUMKoVAUS6ZEueQQiZV+p5pxtPitEsA==" }, "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz", + "integrity": "sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.9.tgz", + "integrity": "sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0", 
- "@babel/plugin-proposal-optional-chaining": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.18.9", + "@babel/plugin-proposal-optional-chaining": "^7.18.9" } }, "@babel/plugin-proposal-async-generator-functions": { - "version": "7.16.8", + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.19.1.tgz", + "integrity": "sha512-0yu8vNATgLy4ivqMNBIwb1HebCelqN7YX8SL3FDXORv/RqT0zEEWUCH4GH44JsSrvCu6GqnAdR5EBFAPeNBB4Q==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-remap-async-to-generator": "^7.16.8", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/helper-remap-async-to-generator": "^7.18.9", "@babel/plugin-syntax-async-generators": "^7.8.4" } }, "@babel/plugin-proposal-class-properties": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz", + "integrity": "sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==", "requires": { - "@babel/helper-create-class-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-proposal-class-static-block": { - "version": "7.17.6", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz", + "integrity": "sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw==", "requires": { - "@babel/helper-create-class-features-plugin": "^7.17.6", - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-class-static-block": "^7.14.5" } }, "@babel/plugin-proposal-dynamic-import": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz", + "integrity": "sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-dynamic-import": "^7.8.3" } }, "@babel/plugin-proposal-export-namespace-from": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz", + "integrity": "sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.9", "@babel/plugin-syntax-export-namespace-from": "^7.8.3" } }, "@babel/plugin-proposal-json-strings": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz", + "integrity": "sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-json-strings": "^7.8.3" } }, 
"@babel/plugin-proposal-logical-assignment-operators": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.9.tgz", + "integrity": "sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.9", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" } }, "@babel/plugin-proposal-nullish-coalescing-operator": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz", + "integrity": "sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" } }, "@babel/plugin-proposal-numeric-separator": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz", + "integrity": "sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-numeric-separator": "^7.10.4" } }, "@babel/plugin-proposal-object-rest-spread": { - "version": "7.17.3", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.19.4.tgz", + "integrity": "sha512-wHmj6LDxVDnL+3WhXteUBaoM1aVILZODAUjg11kHqG4cOlfgMQGxw6aCgvrXrmaJR3Bn14oZhImyCPZzRpC93Q==", "requires": { - "@babel/compat-data": "^7.17.0", - "@babel/helper-compilation-targets": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/compat-data": "^7.19.4", + "@babel/helper-compilation-targets": "^7.19.3", + "@babel/helper-plugin-utils": "^7.19.0", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - "@babel/plugin-transform-parameters": "^7.16.7" + "@babel/plugin-transform-parameters": "^7.18.8" } }, "@babel/plugin-proposal-optional-catch-binding": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz", + "integrity": "sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" } }, "@babel/plugin-proposal-optional-chaining": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.9.tgz", + "integrity": "sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0", + "@babel/helper-plugin-utils": "^7.18.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.18.9", "@babel/plugin-syntax-optional-chaining": "^7.8.3" } }, "@babel/plugin-proposal-private-methods": { - "version": "7.16.11", + 
"version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz", + "integrity": "sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==", "requires": { - "@babel/helper-create-class-features-plugin": "^7.16.10", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-proposal-private-property-in-object": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz", + "integrity": "sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw==", "requires": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-create-class-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-private-property-in-object": "^7.14.5" } }, "@babel/plugin-proposal-unicode-property-regex": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz", + "integrity": "sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==", "requires": { - "@babel/helper-create-regexp-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-syntax-async-generators": { "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-bigint": { "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", "dev": true, "requires": { "@babel/helper-plugin-utils": "^7.8.0" @@ -19399,30 +23619,48 @@ }, "@babel/plugin-syntax-class-properties": { "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", "requires": { "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-syntax-class-static-block": { "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", "requires": { "@babel/helper-plugin-utils": "^7.14.5" } }, "@babel/plugin-syntax-dynamic-import": { "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + "integrity": 
"sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-export-namespace-from": { "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", + "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", "requires": { "@babel/helper-plugin-utils": "^7.8.3" } }, + "@babel/plugin-syntax-import-assertions": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz", + "integrity": "sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.18.6" + } + }, "@babel/plugin-syntax-import-meta": { "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", "dev": true, "requires": { "@babel/helper-plugin-utils": "^7.10.4" @@ -19430,177 +23668,232 @@ }, "@babel/plugin-syntax-json-strings": { "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-jsx": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz", + "integrity": "sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-syntax-logical-assignment-operators": { "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", "requires": { "@babel/helper-plugin-utils": "^7.10.4" } }, "@babel/plugin-syntax-nullish-coalescing-operator": { "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-numeric-separator": { "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", "requires": { "@babel/helper-plugin-utils": "^7.10.4" } }, "@babel/plugin-syntax-object-rest-spread": { "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", "requires": { "@babel/helper-plugin-utils": 
"^7.8.0" } }, "@babel/plugin-syntax-optional-catch-binding": { "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-optional-chaining": { "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-private-property-in-object": { "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", "requires": { "@babel/helper-plugin-utils": "^7.14.5" } }, "@babel/plugin-syntax-top-level-await": { "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", "requires": { "@babel/helper-plugin-utils": "^7.14.5" } }, "@babel/plugin-syntax-typescript": { - "version": "7.17.10", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz", + "integrity": "sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-arrow-functions": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz", + "integrity": "sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-async-to-generator": { - "version": "7.16.8", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz", + "integrity": "sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag==", "requires": { - "@babel/helper-module-imports": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-remap-async-to-generator": "^7.16.8" + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6", + "@babel/helper-remap-async-to-generator": "^7.18.6" } }, "@babel/plugin-transform-block-scoped-functions": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz", + "integrity": "sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-block-scoping": { - "version": "7.16.7", + "version": 
"7.19.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.19.4.tgz", + "integrity": "sha512-934S2VLLlt2hRJwPf4MczaOr4hYF0z+VKPwqTNxyKX7NthTiPfhuKFWQZHXRM0vh/wo/VyXB3s4bZUNA08l+tQ==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.19.0" } }, "@babel/plugin-transform-classes": { - "version": "7.16.7", - "requires": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-environment-visitor": "^7.16.7", - "@babel/helper-function-name": "^7.16.7", - "@babel/helper-optimise-call-expression": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-replace-supers": "^7.16.7", - "@babel/helper-split-export-declaration": "^7.16.7", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.19.0.tgz", + "integrity": "sha512-YfeEE9kCjqTS9IitkgfJuxjcEtLUHMqa8yUJ6zdz8vR7hKuo6mOy2C05P0F1tdMmDCeuyidKnlrw/iTppHcr2A==", + "requires": { + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-compilation-targets": "^7.19.0", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-function-name": "^7.19.0", + "@babel/helper-optimise-call-expression": "^7.18.6", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/helper-replace-supers": "^7.18.9", + "@babel/helper-split-export-declaration": "^7.18.6", "globals": "^11.1.0" } }, "@babel/plugin-transform-computed-properties": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.9.tgz", + "integrity": "sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" } }, "@babel/plugin-transform-destructuring": { - "version": "7.17.7", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.19.4.tgz", + "integrity": "sha512-t0j0Hgidqf0aM86dF8U+vXYReUgJnlv4bZLsyoPnwZNrGY+7/38o8YjaELrvHeVfTZao15kjR0PVv0nju2iduA==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.19.0" } }, "@babel/plugin-transform-dotall-regex": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz", + "integrity": "sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==", "requires": { - "@babel/helper-create-regexp-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-duplicate-keys": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz", + "integrity": "sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" } }, "@babel/plugin-transform-exponentiation-operator": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz", + "integrity": "sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==", "requires": { - "@babel/helper-builder-binary-assignment-operator-visitor": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-for-of": { - "version": "7.16.7", + "version": "7.18.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz", + "integrity": "sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-function-name": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz", + "integrity": "sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ==", "requires": { - "@babel/helper-compilation-targets": "^7.16.7", - "@babel/helper-function-name": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-compilation-targets": "^7.18.9", + "@babel/helper-function-name": "^7.18.9", + "@babel/helper-plugin-utils": "^7.18.9" } }, "@babel/plugin-transform-literals": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz", + "integrity": "sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" } }, "@babel/plugin-transform-member-expression-literals": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz", + "integrity": "sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-modules-amd": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz", + "integrity": "sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg==", "requires": { - "@babel/helper-module-transforms": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", + "@babel/helper-module-transforms": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" }, "dependencies": { "babel-plugin-dynamic-import-node": { "version": "2.3.3", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", + "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", "requires": { "object.assign": "^4.1.0" } @@ -19608,16 +23901,20 @@ } }, "@babel/plugin-transform-modules-commonjs": { - "version": "7.17.9", + "version": "7.18.6", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz", + "integrity": "sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q==", "requires": { - "@babel/helper-module-transforms": "^7.17.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-simple-access": "^7.17.7", + "@babel/helper-module-transforms": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6", + "@babel/helper-simple-access": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" }, "dependencies": { "babel-plugin-dynamic-import-node": { "version": "2.3.3", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", + "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", "requires": { "object.assign": "^4.1.0" } @@ -19625,17 +23922,21 @@ } }, "@babel/plugin-transform-modules-systemjs": { - "version": "7.17.8", - "requires": { - "@babel/helper-hoist-variables": "^7.16.7", - "@babel/helper-module-transforms": "^7.17.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-validator-identifier": "^7.16.7", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.19.0.tgz", + "integrity": "sha512-x9aiR0WXAWmOWsqcsnrzGR+ieaTMVyGyffPVA7F8cXAGt/UxefYv6uSHZLkAFChN5M5Iy1+wjE+xJuPt22H39A==", + "requires": { + "@babel/helper-hoist-variables": "^7.18.6", + "@babel/helper-module-transforms": "^7.19.0", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/helper-validator-identifier": "^7.18.6", "babel-plugin-dynamic-import-node": "^2.3.3" }, "dependencies": { "babel-plugin-dynamic-import-node": { "version": "2.3.3", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", + "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", "requires": { "object.assign": "^4.1.0" } @@ -19643,187 +23944,238 @@ } }, "@babel/plugin-transform-modules-umd": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz", + "integrity": "sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==", "requires": { - "@babel/helper-module-transforms": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-module-transforms": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.17.10", + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.19.1.tgz", + "integrity": "sha512-oWk9l9WItWBQYS4FgXD4Uyy5kq898lvkXpXQxoJEY1RnvPk4R/Dvu2ebXU9q8lP+rlMwUQTFf2Ok6d78ODa0kw==", "requires": { - "@babel/helper-create-regexp-features-plugin": "^7.17.0" + "@babel/helper-create-regexp-features-plugin": "^7.19.0", + "@babel/helper-plugin-utils": "^7.19.0" } }, "@babel/plugin-transform-new-target": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz", + "integrity": "sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==", "requires": { - 
"@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-object-super": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz", + "integrity": "sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-replace-supers": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6", + "@babel/helper-replace-supers": "^7.18.6" } }, "@babel/plugin-transform-parameters": { - "version": "7.16.7", + "version": "7.18.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz", + "integrity": "sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-property-literals": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz", + "integrity": "sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-react-constant-elements": { - "version": "7.17.6", + "version": "7.18.12", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.18.12.tgz", + "integrity": "sha512-Q99U9/ttiu+LMnRU8psd23HhvwXmKWDQIpocm0JKaICcZHnw+mdQbHm6xnSy7dOl8I5PELakYtNBubNQlBXbZw==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" } }, "@babel/plugin-transform-react-display-name": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz", + "integrity": "sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-react-jsx": { - "version": "7.17.3", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.19.0.tgz", + "integrity": "sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg==", "requires": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-module-imports": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/plugin-syntax-jsx": "^7.16.7", - "@babel/types": "^7.17.0" + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/plugin-syntax-jsx": "^7.18.6", + "@babel/types": "^7.19.0" } }, "@babel/plugin-transform-react-jsx-development": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz", + "integrity": "sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==", "requires": { - "@babel/plugin-transform-react-jsx": "^7.16.7" + 
"@babel/plugin-transform-react-jsx": "^7.18.6" } }, "@babel/plugin-transform-react-pure-annotations": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz", + "integrity": "sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ==", "requires": { - "@babel/helper-annotate-as-pure": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-regenerator": { - "version": "7.17.9", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz", + "integrity": "sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ==", "requires": { + "@babel/helper-plugin-utils": "^7.18.6", "regenerator-transform": "^0.15.0" } }, "@babel/plugin-transform-reserved-words": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz", + "integrity": "sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-runtime": { - "version": "7.17.10", - "requires": { - "@babel/helper-module-imports": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", - "babel-plugin-polyfill-corejs2": "^0.3.0", - "babel-plugin-polyfill-corejs3": "^0.5.0", - "babel-plugin-polyfill-regenerator": "^0.3.0", + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.19.1.tgz", + "integrity": "sha512-2nJjTUFIzBMP/f/miLxEK9vxwW/KUXsdvN4sR//TmuDhe6yU2h57WmIOE12Gng3MDP/xpjUV/ToZRdcf8Yj4fA==", + "requires": { + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-plugin-utils": "^7.19.0", + "babel-plugin-polyfill-corejs2": "^0.3.3", + "babel-plugin-polyfill-corejs3": "^0.6.0", + "babel-plugin-polyfill-regenerator": "^0.4.1", "semver": "^6.3.0" }, "dependencies": { "semver": { - "version": "6.3.0" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "@babel/plugin-transform-shorthand-properties": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz", + "integrity": "sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-spread": { - "version": "7.16.7", + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.19.0.tgz", + "integrity": "sha512-RsuMk7j6n+r752EtzyScnWkQyuJdli6LdO5Klv8Yx0OfPVTcQkIUfS8clx5e9yHXzlnhOZF3CbQ8C2uP5j074w==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0" + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.18.9" } }, 
"@babel/plugin-transform-sticky-regex": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz", + "integrity": "sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/plugin-transform-template-literals": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz", + "integrity": "sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" } }, "@babel/plugin-transform-typeof-symbol": { - "version": "7.16.7", + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz", + "integrity": "sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" } }, "@babel/plugin-transform-typescript": { - "version": "7.16.8", + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.19.3.tgz", + "integrity": "sha512-z6fnuK9ve9u/0X0rRvI9MY0xg+DOUaABDYOe+/SQTxtlptaBB/V9JIUxJn6xp3lMBeb9qe8xSFmHU35oZDXD+w==", "requires": { - "@babel/helper-create-class-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/plugin-syntax-typescript": "^7.16.7" + "@babel/helper-create-class-features-plugin": "^7.19.0", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/plugin-syntax-typescript": "^7.18.6" } }, "@babel/plugin-transform-unicode-escapes": { - "version": "7.16.7", + "version": "7.18.10", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz", + "integrity": "sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.9" } }, "@babel/plugin-transform-unicode-regex": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz", + "integrity": "sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==", "requires": { - "@babel/helper-create-regexp-features-plugin": "^7.16.7", - "@babel/helper-plugin-utils": "^7.16.7" + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" } }, "@babel/preset-env": { - "version": "7.17.10", - "requires": { - "@babel/compat-data": "^7.17.10", - "@babel/helper-compilation-targets": "^7.17.10", - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-validator-option": "^7.16.7", - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.16.7", - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.16.7", - "@babel/plugin-proposal-async-generator-functions": "^7.16.8", - "@babel/plugin-proposal-class-properties": "^7.16.7", - "@babel/plugin-proposal-class-static-block": "^7.17.6", - 
"@babel/plugin-proposal-dynamic-import": "^7.16.7", - "@babel/plugin-proposal-export-namespace-from": "^7.16.7", - "@babel/plugin-proposal-json-strings": "^7.16.7", - "@babel/plugin-proposal-logical-assignment-operators": "^7.16.7", - "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7", - "@babel/plugin-proposal-numeric-separator": "^7.16.7", - "@babel/plugin-proposal-object-rest-spread": "^7.17.3", - "@babel/plugin-proposal-optional-catch-binding": "^7.16.7", - "@babel/plugin-proposal-optional-chaining": "^7.16.7", - "@babel/plugin-proposal-private-methods": "^7.16.11", - "@babel/plugin-proposal-private-property-in-object": "^7.16.7", - "@babel/plugin-proposal-unicode-property-regex": "^7.16.7", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.19.4.tgz", + "integrity": "sha512-5QVOTXUdqTCjQuh2GGtdd7YEhoRXBMVGROAtsBeLGIbIz3obCBIfRMT1I3ZKkMgNzwkyCkftDXSSkHxnfVf4qg==", + "requires": { + "@babel/compat-data": "^7.19.4", + "@babel/helper-compilation-targets": "^7.19.3", + "@babel/helper-plugin-utils": "^7.19.0", + "@babel/helper-validator-option": "^7.18.6", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.18.6", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.18.9", + "@babel/plugin-proposal-async-generator-functions": "^7.19.1", + "@babel/plugin-proposal-class-properties": "^7.18.6", + "@babel/plugin-proposal-class-static-block": "^7.18.6", + "@babel/plugin-proposal-dynamic-import": "^7.18.6", + "@babel/plugin-proposal-export-namespace-from": "^7.18.9", + "@babel/plugin-proposal-json-strings": "^7.18.6", + "@babel/plugin-proposal-logical-assignment-operators": "^7.18.9", + "@babel/plugin-proposal-nullish-coalescing-operator": "^7.18.6", + "@babel/plugin-proposal-numeric-separator": "^7.18.6", + "@babel/plugin-proposal-object-rest-spread": "^7.19.4", + "@babel/plugin-proposal-optional-catch-binding": "^7.18.6", + "@babel/plugin-proposal-optional-chaining": "^7.18.9", + "@babel/plugin-proposal-private-methods": "^7.18.6", + "@babel/plugin-proposal-private-property-in-object": "^7.18.6", + "@babel/plugin-proposal-unicode-property-regex": "^7.18.6", "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-class-properties": "^7.12.13", "@babel/plugin-syntax-class-static-block": "^7.14.5", "@babel/plugin-syntax-dynamic-import": "^7.8.3", "@babel/plugin-syntax-export-namespace-from": "^7.8.3", + "@babel/plugin-syntax-import-assertions": "^7.18.6", "@babel/plugin-syntax-json-strings": "^7.8.3", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", @@ -19833,54 +24185,58 @@ "@babel/plugin-syntax-optional-chaining": "^7.8.3", "@babel/plugin-syntax-private-property-in-object": "^7.14.5", "@babel/plugin-syntax-top-level-await": "^7.14.5", - "@babel/plugin-transform-arrow-functions": "^7.16.7", - "@babel/plugin-transform-async-to-generator": "^7.16.8", - "@babel/plugin-transform-block-scoped-functions": "^7.16.7", - "@babel/plugin-transform-block-scoping": "^7.16.7", - "@babel/plugin-transform-classes": "^7.16.7", - "@babel/plugin-transform-computed-properties": "^7.16.7", - "@babel/plugin-transform-destructuring": "^7.17.7", - "@babel/plugin-transform-dotall-regex": "^7.16.7", - "@babel/plugin-transform-duplicate-keys": "^7.16.7", - "@babel/plugin-transform-exponentiation-operator": "^7.16.7", - "@babel/plugin-transform-for-of": "^7.16.7", - "@babel/plugin-transform-function-name": "^7.16.7", - 
"@babel/plugin-transform-literals": "^7.16.7", - "@babel/plugin-transform-member-expression-literals": "^7.16.7", - "@babel/plugin-transform-modules-amd": "^7.16.7", - "@babel/plugin-transform-modules-commonjs": "^7.17.9", - "@babel/plugin-transform-modules-systemjs": "^7.17.8", - "@babel/plugin-transform-modules-umd": "^7.16.7", - "@babel/plugin-transform-named-capturing-groups-regex": "^7.17.10", - "@babel/plugin-transform-new-target": "^7.16.7", - "@babel/plugin-transform-object-super": "^7.16.7", - "@babel/plugin-transform-parameters": "^7.16.7", - "@babel/plugin-transform-property-literals": "^7.16.7", - "@babel/plugin-transform-regenerator": "^7.17.9", - "@babel/plugin-transform-reserved-words": "^7.16.7", - "@babel/plugin-transform-shorthand-properties": "^7.16.7", - "@babel/plugin-transform-spread": "^7.16.7", - "@babel/plugin-transform-sticky-regex": "^7.16.7", - "@babel/plugin-transform-template-literals": "^7.16.7", - "@babel/plugin-transform-typeof-symbol": "^7.16.7", - "@babel/plugin-transform-unicode-escapes": "^7.16.7", - "@babel/plugin-transform-unicode-regex": "^7.16.7", + "@babel/plugin-transform-arrow-functions": "^7.18.6", + "@babel/plugin-transform-async-to-generator": "^7.18.6", + "@babel/plugin-transform-block-scoped-functions": "^7.18.6", + "@babel/plugin-transform-block-scoping": "^7.19.4", + "@babel/plugin-transform-classes": "^7.19.0", + "@babel/plugin-transform-computed-properties": "^7.18.9", + "@babel/plugin-transform-destructuring": "^7.19.4", + "@babel/plugin-transform-dotall-regex": "^7.18.6", + "@babel/plugin-transform-duplicate-keys": "^7.18.9", + "@babel/plugin-transform-exponentiation-operator": "^7.18.6", + "@babel/plugin-transform-for-of": "^7.18.8", + "@babel/plugin-transform-function-name": "^7.18.9", + "@babel/plugin-transform-literals": "^7.18.9", + "@babel/plugin-transform-member-expression-literals": "^7.18.6", + "@babel/plugin-transform-modules-amd": "^7.18.6", + "@babel/plugin-transform-modules-commonjs": "^7.18.6", + "@babel/plugin-transform-modules-systemjs": "^7.19.0", + "@babel/plugin-transform-modules-umd": "^7.18.6", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.19.1", + "@babel/plugin-transform-new-target": "^7.18.6", + "@babel/plugin-transform-object-super": "^7.18.6", + "@babel/plugin-transform-parameters": "^7.18.8", + "@babel/plugin-transform-property-literals": "^7.18.6", + "@babel/plugin-transform-regenerator": "^7.18.6", + "@babel/plugin-transform-reserved-words": "^7.18.6", + "@babel/plugin-transform-shorthand-properties": "^7.18.6", + "@babel/plugin-transform-spread": "^7.19.0", + "@babel/plugin-transform-sticky-regex": "^7.18.6", + "@babel/plugin-transform-template-literals": "^7.18.9", + "@babel/plugin-transform-typeof-symbol": "^7.18.9", + "@babel/plugin-transform-unicode-escapes": "^7.18.10", + "@babel/plugin-transform-unicode-regex": "^7.18.6", "@babel/preset-modules": "^0.1.5", - "@babel/types": "^7.17.10", - "babel-plugin-polyfill-corejs2": "^0.3.0", - "babel-plugin-polyfill-corejs3": "^0.5.0", - "babel-plugin-polyfill-regenerator": "^0.3.0", - "core-js-compat": "^3.22.1", + "@babel/types": "^7.19.4", + "babel-plugin-polyfill-corejs2": "^0.3.3", + "babel-plugin-polyfill-corejs3": "^0.6.0", + "babel-plugin-polyfill-regenerator": "^0.4.1", + "core-js-compat": "^3.25.1", "semver": "^6.3.0" }, "dependencies": { "semver": { - "version": "6.3.0" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "@babel/preset-modules": { "version": "0.1.5", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz", + "integrity": "sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==", "requires": { "@babel/helper-plugin-utils": "^7.0.0", "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", @@ -19890,89 +24246,174 @@ } }, "@babel/preset-react": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.18.6.tgz", + "integrity": "sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-validator-option": "^7.16.7", - "@babel/plugin-transform-react-display-name": "^7.16.7", - "@babel/plugin-transform-react-jsx": "^7.16.7", - "@babel/plugin-transform-react-jsx-development": "^7.16.7", - "@babel/plugin-transform-react-pure-annotations": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6", + "@babel/helper-validator-option": "^7.18.6", + "@babel/plugin-transform-react-display-name": "^7.18.6", + "@babel/plugin-transform-react-jsx": "^7.18.6", + "@babel/plugin-transform-react-jsx-development": "^7.18.6", + "@babel/plugin-transform-react-pure-annotations": "^7.18.6" } }, "@babel/preset-typescript": { - "version": "7.16.7", + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.18.6.tgz", + "integrity": "sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ==", "requires": { - "@babel/helper-plugin-utils": "^7.16.7", - "@babel/helper-validator-option": "^7.16.7", - "@babel/plugin-transform-typescript": "^7.16.7" + "@babel/helper-plugin-utils": "^7.18.6", + "@babel/helper-validator-option": "^7.18.6", + "@babel/plugin-transform-typescript": "^7.18.6" } }, "@babel/runtime": { - "version": "7.17.9", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.19.4.tgz", + "integrity": "sha512-EXpLCrk55f+cYqmHsSR+yD/0gAIMxxA9QK9lnQWzhMCvt+YmoBN7Zx94s++Kv0+unHk39vxNO8t+CMA2WSS3wA==", "requires": { "regenerator-runtime": "^0.13.4" } }, "@babel/runtime-corejs3": { - "version": "7.17.9", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.19.4.tgz", + "integrity": "sha512-HzjQ8+dzdx7dmZy4DQ8KV8aHi/74AjEbBGTFutBmg/pd3dY5/q1sfuOGPTFGEytlQhWoeVXqcK5BwMgIkRkNDQ==", "requires": { - "core-js-pure": "^3.20.2", + "core-js-pure": "^3.25.1", "regenerator-runtime": "^0.13.4" } }, "@babel/template": { - "version": "7.16.7", + "version": "7.18.10", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.18.10.tgz", + "integrity": "sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==", "requires": { - "@babel/code-frame": "^7.16.7", - "@babel/parser": "^7.16.7", - "@babel/types": "^7.16.7" + "@babel/code-frame": "^7.18.6", + "@babel/parser": "^7.18.10", + "@babel/types": "^7.18.10" } }, "@babel/traverse": { - "version": "7.17.10", - "requires": { - "@babel/code-frame": "^7.16.7", - "@babel/generator": "^7.17.10", - "@babel/helper-environment-visitor": "^7.16.7", - "@babel/helper-function-name": "^7.17.9", - "@babel/helper-hoist-variables": "^7.16.7", - "@babel/helper-split-export-declaration": "^7.16.7", - "@babel/parser": 
"^7.17.10", - "@babel/types": "^7.17.10", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.19.4.tgz", + "integrity": "sha512-w3K1i+V5u2aJUOXBFFC5pveFLmtq1s3qcdDNC2qRI6WPBQIDaKFqXxDEqDO/h1dQ3HjsZoZMyIy6jGLq0xtw+g==", + "requires": { + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.19.4", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-function-name": "^7.19.0", + "@babel/helper-hoist-variables": "^7.18.6", + "@babel/helper-split-export-declaration": "^7.18.6", + "@babel/parser": "^7.19.4", + "@babel/types": "^7.19.4", "debug": "^4.1.0", "globals": "^11.1.0" } }, "@babel/types": { - "version": "7.17.10", + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.19.4.tgz", + "integrity": "sha512-M5LK7nAeS6+9j7hAq+b3fQs+pNfUtTGq+yFFfHnauFA8zQtLRfmuipmsKDKKLuyG+wC8ABW43A153YNawNTEtw==", "requires": { - "@babel/helper-validator-identifier": "^7.16.7", + "@babel/helper-string-parser": "^7.19.4", + "@babel/helper-validator-identifier": "^7.19.1", "to-fast-properties": "^2.0.0" } }, "@bcoe/v8-coverage": { "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, - "@colors/colors": { - "version": "1.5.0", - "optional": true + "@colors/colors": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", + "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", + "optional": true + }, + "@cypress/request": { + "version": "2.88.10", + "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.10.tgz", + "integrity": "sha512-Zp7F+R93N0yZyG34GutyTNr+okam7s/Fzc1+i3kcqOP8vk6OuajuE9qZJ6Rs+10/1JFtXFYMdyarnU1rZuJesg==", + "dev": true, + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "http-signature": "~1.3.6", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^8.3.2" + }, + "dependencies": { + "form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + } + } + }, + "@cypress/xvfb": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", + "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", + "dev": true, + "requires": { + "debug": "^3.1.0", + "lodash.once": "^4.1.1" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } + } }, "@docsearch/css": { - "version": "3.0.0" + "version": "3.2.1", + "resolved": 
"https://registry.npmjs.org/@docsearch/css/-/css-3.2.1.tgz", + "integrity": "sha512-gaP6TxxwQC+K8D6TRx5WULUWKrcbzECOPA2KCVMuI+6C7dNiGUk5yXXzVhc5sld79XKYLnO9DRTI4mjXDYkh+g==" }, "@docsearch/react": { - "version": "3.0.0", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.2.1.tgz", + "integrity": "sha512-EzTQ/y82s14IQC5XVestiK/kFFMe2aagoYFuTAIfIb/e+4FU7kSMKonRtLwsCiLQHmjvNQq+HO+33giJ5YVtaQ==", "requires": { - "@algolia/autocomplete-core": "1.5.2", - "@algolia/autocomplete-preset-algolia": "1.5.2", - "@docsearch/css": "3.0.0", + "@algolia/autocomplete-core": "1.7.1", + "@algolia/autocomplete-preset-algolia": "1.7.1", + "@docsearch/css": "3.2.1", "algoliasearch": "^4.0.0" } }, "@docusaurus/core": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-2.0.0-beta.17.tgz", + "integrity": "sha512-iNdW7CsmHNOgc4PxD9BFxa+MD8+i7ln7erOBkF3FSMMPnsKUeVqsR3rr31aLmLZRlTXMITSPLxlXwtBZa3KPCw==", "requires": { "@babel/core": "^7.17.5", "@babel/generator": "^7.17.3", @@ -20049,98 +24490,10 @@ "webpackbar": "^5.0.2" }, "dependencies": { - "@svgr/babel-plugin-add-jsx-attribute": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-remove-jsx-attribute": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-remove-jsx-empty-expression": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-replace-jsx-attribute-value": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-svg-dynamic-title": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-svg-em-dimensions": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-transform-react-native-svg": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-transform-svg-component": { - "version": "6.2.0", - "requires": {} - }, - "@svgr/babel-preset": { - "version": "6.2.0", - "requires": { - "@svgr/babel-plugin-add-jsx-attribute": "^6.0.0", - "@svgr/babel-plugin-remove-jsx-attribute": "^6.0.0", - "@svgr/babel-plugin-remove-jsx-empty-expression": "^6.0.0", - "@svgr/babel-plugin-replace-jsx-attribute-value": "^6.0.0", - "@svgr/babel-plugin-svg-dynamic-title": "^6.0.0", - "@svgr/babel-plugin-svg-em-dimensions": "^6.0.0", - "@svgr/babel-plugin-transform-react-native-svg": "^6.0.0", - "@svgr/babel-plugin-transform-svg-component": "^6.2.0" - } - }, - "@svgr/core": { - "version": "6.2.1", - "requires": { - "@svgr/plugin-jsx": "^6.2.1", - "camelcase": "^6.2.0", - "cosmiconfig": "^7.0.1" - } - }, - "@svgr/hast-util-to-babel-ast": { - "version": "6.2.1", - "requires": { - "@babel/types": "^7.15.6", - "entities": "^3.0.1" - } - }, - "@svgr/plugin-jsx": { - "version": "6.2.1", - "requires": { - "@babel/core": "^7.15.5", - "@svgr/babel-preset": "^6.2.0", - "@svgr/hast-util-to-babel-ast": "^6.2.1", - "svg-parser": "^2.0.2" - } - }, - "@svgr/plugin-svgo": { - "version": "6.2.0", - "requires": { - "cosmiconfig": "^7.0.1", - "deepmerge": "^4.2.2", - "svgo": "^2.5.0" - } - }, - "@svgr/webpack": { - "version": "6.2.1", - "requires": { - "@babel/core": "^7.15.5", - "@babel/plugin-transform-react-constant-elements": "^7.14.5", - "@babel/preset-env": "^7.15.6", - "@babel/preset-react": "^7.14.5", - "@babel/preset-typescript": "^7.15.0", - "@svgr/core": "^6.2.1", - "@svgr/plugin-jsx": "^6.2.1", - "@svgr/plugin-svgo": "^6.2.0" - } - }, "css-loader": { "version": "6.7.1", + "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.7.1.tgz", + "integrity": 
"sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw==", "requires": { "icss-utils": "^5.1.0", "postcss": "^8.4.7", @@ -20152,29 +24505,22 @@ "semver": "^7.3.5" } }, - "css-tree": { - "version": "1.1.3", - "requires": { - "mdn-data": "2.0.14", - "source-map": "^0.6.1" - } - }, - "entities": { - "version": "3.0.1" - }, "icss-utils": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz", + "integrity": "sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==", "requires": {} }, - "mdn-data": { - "version": "2.0.14" - }, "postcss-modules-extract-imports": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz", + "integrity": "sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw==", "requires": {} }, "postcss-modules-local-by-default": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz", + "integrity": "sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ==", "requires": { "icss-utils": "^5.0.0", "postcss-selector-parser": "^6.0.2", @@ -20183,37 +24529,26 @@ }, "postcss-modules-scope": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz", + "integrity": "sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg==", "requires": { "postcss-selector-parser": "^6.0.4" } }, "postcss-modules-values": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz", + "integrity": "sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==", "requires": { "icss-utils": "^5.0.0" } - }, - "svgo": { - "version": "2.8.0", - "requires": { - "@trysound/sax": "0.2.0", - "commander": "^7.2.0", - "css-select": "^4.1.3", - "css-tree": "^1.1.3", - "csso": "^4.2.0", - "picocolors": "^1.0.0", - "stable": "^0.1.8" - }, - "dependencies": { - "commander": { - "version": "7.2.0" - } - } } } }, "@docusaurus/cssnano-preset": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-2.0.0-beta.17.tgz", + "integrity": "sha512-DoBwtLjJ9IY9/lNMHIEdo90L4NDayvU28nLgtjR2Sc6aBIMEB/3a5Ndjehnp+jZAkwcDdNASA86EkZVUyz1O1A==", "requires": { "cssnano-preset-advanced": "^5.1.12", "postcss": "^8.4.7", @@ -20222,6 +24557,8 @@ }, "@docusaurus/logger": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-2.0.0-beta.17.tgz", + "integrity": "sha512-F9JDl06/VLg+ylsvnq9NpILSUeWtl0j4H2LtlLzX5gufEL4dGiCMlnUzYdHl7FSHSzYJ0A/R7vu0SYofsexC4w==", "requires": { "chalk": "^4.1.2", "tslib": "^2.3.1" @@ -20229,12 +24566,16 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { "color-convert": "^2.0.1" } }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "requires": { "ansi-styles": "^4.1.0", 
"supports-color": "^7.1.0" @@ -20242,18 +24583,26 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "has-flag": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "requires": { "has-flag": "^4.0.0" } @@ -20262,6 +24611,8 @@ }, "@docusaurus/lqip-loader": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/lqip-loader/-/lqip-loader-2.0.0-beta.17.tgz", + "integrity": "sha512-pqwVuSYOthQgVVUmc8lI36hHzewKbBZBjLgxl3Khd2xL1gShZcb8o0Zx0IXSvqskZaq3E3TXRuzBAjyvIRaKdw==", "requires": { "@docusaurus/logger": "2.0.0-beta.17", "file-loader": "^6.2.0", @@ -20273,6 +24624,8 @@ }, "@docusaurus/mdx-loader": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-2.0.0-beta.17.tgz", + "integrity": "sha512-AhJ3GWRmjQYCyINHE595pff5tn3Rt83oGpdev5UT9uvG9lPYPC8nEmh1LI6c0ogfw7YkNznzxWSW4hyyVbYQ3A==", "requires": { "@babel/parser": "^7.17.3", "@babel/traverse": "^7.17.3", @@ -20294,6 +24647,8 @@ }, "@docusaurus/module-type-aliases": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-2.0.0-beta.17.tgz", + "integrity": "sha512-Tu+8geC/wyygBudbSwvWIHEvt5RwyA7dEoE1JmPbgQtmqUxOZ9bgnfemwXpJW5mKuDiJASbN4of1DhbLqf4sPg==", "requires": { "@docusaurus/types": "2.0.0-beta.17", "@types/react": "*", @@ -20304,6 +24659,8 @@ }, "@docusaurus/plugin-content-blog": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.0.0-beta.17.tgz", + "integrity": "sha512-gcX4UR+WKT4bhF8FICBQHy+ESS9iRMeaglSboTZbA/YHGax/3EuZtcPU3dU4E/HFJeZ866wgUdbLKpIpsZOidg==", "requires": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/logger": "2.0.0-beta.17", @@ -20324,6 +24681,8 @@ }, "@docusaurus/plugin-content-docs": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.0.0-beta.17.tgz", + "integrity": "sha512-YYrBpuRfTfE6NtENrpSHTJ7K7PZifn6j6hcuvdC0QKE+WD8pS+O2/Ws30yoyvHwLnAnfhvaderh1v9Kaa0/ANg==", "requires": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/logger": "2.0.0-beta.17", @@ -20343,6 +24702,8 @@ }, "@docusaurus/plugin-content-pages": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.0.0-beta.17.tgz", + "integrity": "sha512-d5x0mXTMJ44ojRQccmLyshYoamFOep2AnBe69osCDnwWMbD3Or3pnc2KMK9N7mVpQFnNFKbHNCLrX3Rv0uwEHA==", "requires": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/mdx-loader": "2.0.0-beta.17", @@ -20356,6 +24717,8 @@ }, "@docusaurus/plugin-debug": { 
"version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-2.0.0-beta.17.tgz", + "integrity": "sha512-p26fjYFRSC0esEmKo/kRrLVwXoFnzPCFDumwrImhPyqfVxbj+IKFaiXkayb2qHnyEGE/1KSDIgRF4CHt/pyhiw==", "requires": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/utils": "2.0.0-beta.17", @@ -20366,6 +24729,8 @@ }, "@docusaurus/plugin-google-analytics": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.0.0-beta.17.tgz", + "integrity": "sha512-jvgYIhggYD1W2jymqQVAAyjPJUV1xMCn70bAzaCMxriureMWzhQ/kQMVQpop0ijTMvifOxaV9yTcL1VRXev++A==", "requires": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/utils-validation": "2.0.0-beta.17", @@ -20374,6 +24739,8 @@ }, "@docusaurus/plugin-google-gtag": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.0.0-beta.17.tgz", + "integrity": "sha512-1pnWHtIk1Jfeqwvr8PlcPE5SODWT1gW4TI+ptmJbJ296FjjyvL/pG0AcGEJmYLY/OQc3oz0VQ0W2ognw9jmFIw==", "requires": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/utils-validation": "2.0.0-beta.17", @@ -20382,6 +24749,8 @@ }, "@docusaurus/plugin-ideal-image": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-ideal-image/-/plugin-ideal-image-2.0.0-beta.17.tgz", + "integrity": "sha512-9ZUi3cVnQsOCW4NSDyQRbH4UmlLqclKjQNQv+O/BKwO5EkjLeB4Q2TeHaXkivntrY4nARGjRcGBHKb3L9zzJXA==", "requires": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/lqip-loader": "2.0.0-beta.17", @@ -20397,6 +24766,8 @@ }, "@docusaurus/plugin-sitemap": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.0.0-beta.17.tgz", + "integrity": "sha512-19/PaGCsap6cjUPZPGs87yV9e1hAIyd0CTSeVV6Caega8nmOKk20FTrQGFJjZPeX8jvD9QIXcdg6BJnPxcKkaQ==", "requires": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/utils": "2.0.0-beta.17", @@ -20409,6 +24780,8 @@ }, "@docusaurus/preset-classic": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-2.0.0-beta.17.tgz", + "integrity": "sha512-7YUxPEgM09aZWr25/hpDEp1gPl+1KsCPV1ZTRW43sbQ9TinPm+9AKR3rHVDa8ea8MdiS7BpqCVyK+H/eiyQrUw==", "requires": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/plugin-content-blog": "2.0.0-beta.17", @@ -20425,6 +24798,8 @@ }, "@docusaurus/react-loadable": { "version": "5.5.2", + "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz", + "integrity": "sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==", "requires": { "@types/react": "*", "prop-types": "^15.6.2" @@ -20432,12 +24807,16 @@ }, "@docusaurus/responsive-loader": { "version": "1.7.0", + "resolved": "https://registry.npmjs.org/@docusaurus/responsive-loader/-/responsive-loader-1.7.0.tgz", + "integrity": "sha512-N0cWuVqTRXRvkBxeMQcy/OF2l7GN8rmni5EzR3HpwR+iU2ckYPnziceojcxvvxQ5NqZg1QfEW0tycQgHp+e+Nw==", "requires": { "loader-utils": "^2.0.0" } }, "@docusaurus/theme-classic": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-2.0.0-beta.17.tgz", + "integrity": "sha512-xfZ9kpgqo0lP9YO4rJj79wtiQJXU6ARo5wYy10IIwiWN+lg00scJHhkmNV431b05xIUjUr0cKeH9nqZmEsQRKg==", "requires": { "@docusaurus/core": "2.0.0-beta.17", "@docusaurus/plugin-content-blog": "2.0.0-beta.17", @@ -20462,6 +24841,8 @@ }, "@docusaurus/theme-common": { "version": 
"2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-2.0.0-beta.17.tgz", + "integrity": "sha512-LJBDhx+Qexn1JHBqZbE4k+7lBaV1LgpE33enXf43ShB7ebhC91d5HLHhBwgt0pih4+elZU4rG+BG/roAmsNM0g==", "requires": { "@docusaurus/module-type-aliases": "2.0.0-beta.17", "@docusaurus/plugin-content-blog": "2.0.0-beta.17", @@ -20476,6 +24857,8 @@ }, "@docusaurus/theme-search-algolia": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.0.0-beta.17.tgz", + "integrity": "sha512-W12XKM7QC5Jmrec359bJ7aDp5U8DNkCxjVKsMNIs8rDunBoI/N+R35ERJ0N7Bg9ONAWO6o7VkUERQsfGqdvr9w==", "requires": { "@docsearch/react": "^3.0.0", "@docusaurus/core": "2.0.0-beta.17", @@ -20496,6 +24879,8 @@ }, "@docusaurus/theme-translations": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-2.0.0-beta.17.tgz", + "integrity": "sha512-oxCX6khjZH3lgdRCL0DH06KkUM/kDr9+lzB35+vY8rpFeQruVgRdi8ekPqG3+Wr0U/N+LMhcYE5BmCb6D0Fv2A==", "requires": { "fs-extra": "^10.0.1", "tslib": "^2.3.1" @@ -20503,6 +24888,8 @@ }, "@docusaurus/types": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-2.0.0-beta.17.tgz", + "integrity": "sha512-4o7TXu5sKlQpybfFFtsGUElBXwSpiXKsQyyWaRKj7DRBkvMtkDX6ITZNnZO9+EHfLbP/cfrokB8C/oO7mCQ5BQ==", "requires": { "commander": "^5.1.0", "joi": "^17.6.0", @@ -20514,6 +24901,8 @@ }, "@docusaurus/utils": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-2.0.0-beta.17.tgz", + "integrity": "sha512-yRKGdzSc5v6M/6GyQ4omkrAHCleevwKYiIrufCJgRbOtkhYE574d8mIjjirOuA/emcyLxjh+TLtqAA5TwhIryA==", "requires": { "@docusaurus/logger": "2.0.0-beta.17", "@svgr/webpack": "^6.0.0", @@ -20530,136 +24919,20 @@ "tslib": "^2.3.1", "url-loader": "^4.1.1", "webpack": "^5.69.1" - }, - "dependencies": { - "@svgr/babel-plugin-add-jsx-attribute": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-remove-jsx-attribute": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-remove-jsx-empty-expression": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-replace-jsx-attribute-value": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-svg-dynamic-title": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-svg-em-dimensions": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-transform-react-native-svg": { - "version": "6.0.0", - "requires": {} - }, - "@svgr/babel-plugin-transform-svg-component": { - "version": "6.2.0", - "requires": {} - }, - "@svgr/babel-preset": { - "version": "6.2.0", - "requires": { - "@svgr/babel-plugin-add-jsx-attribute": "^6.0.0", - "@svgr/babel-plugin-remove-jsx-attribute": "^6.0.0", - "@svgr/babel-plugin-remove-jsx-empty-expression": "^6.0.0", - "@svgr/babel-plugin-replace-jsx-attribute-value": "^6.0.0", - "@svgr/babel-plugin-svg-dynamic-title": "^6.0.0", - "@svgr/babel-plugin-svg-em-dimensions": "^6.0.0", - "@svgr/babel-plugin-transform-react-native-svg": "^6.0.0", - "@svgr/babel-plugin-transform-svg-component": "^6.2.0" - } - }, - "@svgr/core": { - "version": "6.2.1", - "requires": { - "@svgr/plugin-jsx": "^6.2.1", - "camelcase": "^6.2.0", - "cosmiconfig": "^7.0.1" - } - }, - "@svgr/hast-util-to-babel-ast": { - "version": "6.2.1", - "requires": { - "@babel/types": "^7.15.6", - "entities": "^3.0.1" - } - }, - "@svgr/plugin-jsx": { - "version": "6.2.1", - "requires": { 
- "@babel/core": "^7.15.5", - "@svgr/babel-preset": "^6.2.0", - "@svgr/hast-util-to-babel-ast": "^6.2.1", - "svg-parser": "^2.0.2" - } - }, - "@svgr/plugin-svgo": { - "version": "6.2.0", - "requires": { - "cosmiconfig": "^7.0.1", - "deepmerge": "^4.2.2", - "svgo": "^2.5.0" - } - }, - "@svgr/webpack": { - "version": "6.2.1", - "requires": { - "@babel/core": "^7.15.5", - "@babel/plugin-transform-react-constant-elements": "^7.14.5", - "@babel/preset-env": "^7.15.6", - "@babel/preset-react": "^7.14.5", - "@babel/preset-typescript": "^7.15.0", - "@svgr/core": "^6.2.1", - "@svgr/plugin-jsx": "^6.2.1", - "@svgr/plugin-svgo": "^6.2.0" - } - }, - "commander": { - "version": "7.2.0" - }, - "css-tree": { - "version": "1.1.3", - "requires": { - "mdn-data": "2.0.14", - "source-map": "^0.6.1" - } - }, - "entities": { - "version": "3.0.1" - }, - "mdn-data": { - "version": "2.0.14" - }, - "svgo": { - "version": "2.8.0", - "requires": { - "@trysound/sax": "0.2.0", - "commander": "^7.2.0", - "css-select": "^4.1.3", - "css-tree": "^1.1.3", - "csso": "^4.2.0", - "picocolors": "^1.0.0", - "stable": "^0.1.8" - } - } } }, "@docusaurus/utils-common": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-2.0.0-beta.17.tgz", + "integrity": "sha512-90WCVdj6zYzs7neEIS594qfLO78cUL6EVK1CsRHJgVkkGjcYlCQ1NwkyO7bOb+nIAwdJrPJRc2FBSpuEGxPD3w==", "requires": { "tslib": "^2.3.1" } }, "@docusaurus/utils-validation": { "version": "2.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-2.0.0-beta.17.tgz", + "integrity": "sha512-5UjayUP16fDjgd52eSEhL7SlN9x60pIhyS+K7kt7RmpSLy42+4/bSr2pns2VlATmuaoNOO6iIFdB2jgSYJ6SGA==", "requires": { "@docusaurus/logger": "2.0.0-beta.17", "@docusaurus/utils": "2.0.0-beta.17", @@ -20669,37 +24942,55 @@ }, "@emotion/is-prop-valid": { "version": "0.8.8", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", + "integrity": "sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==", "requires": { "@emotion/memoize": "0.7.4" } }, "@emotion/memoize": { - "version": "0.7.4" + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz", + "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==" }, "@emotion/stylis": { - "version": "0.8.5" + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/@emotion/stylis/-/stylis-0.8.5.tgz", + "integrity": "sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ==" }, "@emotion/unitless": { - "version": "0.7.5" + "version": "0.7.5", + "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz", + "integrity": "sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==" }, "@endiliey/react-ideal-image": { "version": "0.0.11", + "resolved": "https://registry.npmjs.org/@endiliey/react-ideal-image/-/react-ideal-image-0.0.11.tgz", + "integrity": "sha512-QxMjt/Gvur/gLxSoCy7VIyGGGrGmDN+VHcXkN3R2ApoWX0EYUE+hMgPHSW/PV6VVebZ1Nd4t2UnGRBDihu16JQ==", "requires": {} }, "@exodus/schemasafe": { - "version": "1.0.0-rc.6" + "version": "1.0.0-rc.9", + "resolved": "https://registry.npmjs.org/@exodus/schemasafe/-/schemasafe-1.0.0-rc.9.tgz", + "integrity": "sha512-dGGHpb61hLwifAu7sotuHFDBw6GTdpG8aKC0fsK17EuTzMRvUrH7lEAr6LTJ+sx3AZYed9yZ77rltVDHyg2hRg==" }, "@hapi/hoek": { - "version": "9.3.0" + "version": "9.3.0", 
+ "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz", + "integrity": "sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==" }, "@hapi/topo": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz", + "integrity": "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==", "requires": { "@hapi/hoek": "^9.0.0" } }, "@istanbuljs/load-nyc-config": { "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", "dev": true, "requires": { "camelcase": "^5.3.1", @@ -20711,6 +25002,8 @@ "dependencies": { "argparse": { "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "dev": true, "requires": { "sprintf-js": "~1.0.2" @@ -20718,10 +25011,14 @@ }, "camelcase": { "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", "dev": true }, "js-yaml": { "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "dev": true, "requires": { "argparse": "^1.0.7", @@ -20730,16 +25027,22 @@ }, "resolve-from": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true } } }, "@istanbuljs/schema": { "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", "dev": true }, "@jest/console": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-27.5.1.tgz", + "integrity": "sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg==", "dev": true, "requires": { "@jest/types": "^27.5.1", @@ -20752,6 +25055,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -20759,6 +25064,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -20767,6 +25074,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -20774,14 +25083,51 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -20791,6 +25137,8 @@ }, "@jest/core": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-27.5.1.tgz", + "integrity": "sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ==", "dev": true, "requires": { "@jest/console": "^27.5.1", @@ -20825,6 +25173,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -20832,6 +25182,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -20840,6 +25192,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -20847,14 +25201,51 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": 
"sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -20864,6 +25255,8 @@ }, "@jest/create-cache-key-function": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/create-cache-key-function/-/create-cache-key-function-27.5.1.tgz", + "integrity": "sha512-dmH1yW+makpTSURTy8VzdUwFnfQh1G8R+DxO2Ho2FFmBbKFEVm+3jWdvFhE2VqB/LATCTokkP0dotjyQyw5/AQ==", "dev": true, "requires": { "@jest/types": "^27.5.1" @@ -20871,6 +25264,8 @@ }, "@jest/environment": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-27.5.1.tgz", + "integrity": "sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA==", "dev": true, "requires": { "@jest/fake-timers": "^27.5.1", @@ -20879,8 +25274,19 @@ "jest-mock": "^27.5.1" } }, + "@jest/expect-utils": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.2.1.tgz", + "integrity": "sha512-yr4aHNg5Z1CjKby5ozm7sKjgBlCOorlAoFcvrOQ/4rbZRfgZQdnmh7cth192PYIgiPZo2bBXvqdOApnAMWFJZg==", + "dev": true, + "requires": { + "jest-get-type": "^29.2.0" + } + }, "@jest/fake-timers": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-27.5.1.tgz", + "integrity": "sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ==", "dev": true, "requires": { "@jest/types": "^27.5.1", @@ -20889,19 +25295,221 @@ "jest-message-util": "^27.5.1", "jest-mock": "^27.5.1", "jest-util": "^27.5.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + 
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "@jest/globals": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-27.5.1.tgz", + "integrity": "sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q==", "dev": true, "requires": { "@jest/environment": "^27.5.1", "@jest/types": "^27.5.1", "expect": "^27.5.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "diff-sequences": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": "sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", + "dev": true + }, + "expect": { + "version": 
"27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-diff": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + } + }, + "jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true + }, + "jest-matcher-utils": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + } + }, + "jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "@jest/reporters": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-27.5.1.tgz", + "integrity": "sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw==", "dev": true, "requires": { "@bcoe/v8-coverage": "^0.2.3", @@ -20933,6 +25541,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -20940,6 +25550,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -20948,6 +25560,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -20955,14 +25569,34 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -20970,8 +25604,19 @@ } } }, + "@jest/schemas": { + "version": "29.0.0", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.0.0.tgz", + "integrity": "sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA==", + "dev": true, + "requires": { + "@sinclair/typebox": "^0.24.1" + } + }, "@jest/source-map": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-27.5.1.tgz", + "integrity": "sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg==", "dev": true, "requires": { "callsites": "^3.0.0", @@ -20981,6 +25626,8 @@ }, "@jest/test-result": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-27.5.1.tgz", + "integrity": "sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag==", "dev": true, "requires": { "@jest/console": "^27.5.1", @@ -20991,6 +25638,8 @@ }, "@jest/test-sequencer": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz", + "integrity": "sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ==", "dev": true, "requires": { "@jest/test-result": "^27.5.1", @@ -21001,6 +25650,8 @@ }, "@jest/transform": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-27.5.1.tgz", + "integrity": "sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw==", "dev": true, "requires": { "@babel/core": "^7.1.0", @@ -21022,6 +25673,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -21029,6 +25682,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": 
"sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -21037,6 +25692,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -21044,14 +25701,34 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -21061,6 +25738,8 @@ }, "@jest/types": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.5.1.tgz", + "integrity": "sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw==", "dev": true, "requires": { "@types/istanbul-lib-coverage": "^2.0.0", @@ -21072,6 +25751,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -21079,6 +25760,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -21087,6 +25770,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -21094,14 +25779,20 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, "supports-color": { "version": "7.2.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -21110,18 +25801,22 @@ } }, "@jimp/bmp": { - "version": "0.16.1", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/bmp/-/bmp-0.16.2.tgz", + "integrity": "sha512-4g9vW45QfMoGhLVvaFj26h4e7cC+McHUQwyFQmNTLW4FfC1OonN9oUr2m/FEDGkTYKR7aqdXR5XUqqIkHWLaFw==", "requires": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1", + "@jimp/utils": "^0.16.2", "bmp-js": "^0.1.0" } }, "@jimp/core": { - "version": "0.16.1", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/core/-/core-0.16.2.tgz", + "integrity": "sha512-dp7HcyUMzjXphXYodI6PaXue+I9PXAavbb+AN+1XqFbotN22Z12DosNPEyy+UhLY/hZiQQqUkEaJHkvV31rs+w==", "requires": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1", + "@jimp/utils": "^0.16.2", "any-base": "^1.1.0", "buffer": "^5.2.0", "exif-parser": "^0.1.12", @@ -21131,77 +25826,84 @@ "phin": "^2.9.1", "pixelmatch": "^4.0.2", "tinycolor2": "^1.4.1" - }, - "dependencies": { - "buffer": { - "version": "5.7.1", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - } } }, "@jimp/custom": { - "version": "0.16.1", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/custom/-/custom-0.16.2.tgz", + "integrity": "sha512-GtNwOs4hcVS2GIbqRUf42rUuX07oLB92cj7cqxZb0ZGWwcwhnmSW0TFLAkNafXmqn9ug4VTpNvcJSUdiuECVKg==", "requires": { "@babel/runtime": "^7.7.2", - "@jimp/core": "^0.16.1" + "@jimp/core": "^0.16.2" } }, "@jimp/gif": { - "version": "0.16.1", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/gif/-/gif-0.16.2.tgz", + "integrity": "sha512-TMdyT9Q0paIKNtT7c5KzQD29CNCsI/t8ka28jMrBjEK7j5RRTvBfuoOnHv7pDJRCjCIqeUoaUSJ7QcciKic6CA==", "requires": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1", + "@jimp/utils": "^0.16.2", "gifwrap": "^0.9.2", "omggif": "^1.0.9" } }, "@jimp/jpeg": { - "version": "0.16.1", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/jpeg/-/jpeg-0.16.2.tgz", + "integrity": "sha512-BW5gZydgq6wdIwHd+3iUNgrTklvoQc/FUKSj9meM6A0FU21lUaansRX5BDdJqHkyXJLnnlDGwDt27J+hQuBAVw==", "requires": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1", - "jpeg-js": "0.4.2" + "@jimp/utils": "^0.16.2", + "jpeg-js": "^0.4.2" } }, "@jimp/plugin-resize": { - "version": "0.16.1", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/plugin-resize/-/plugin-resize-0.16.2.tgz", + "integrity": "sha512-gE4N9l6xuwzacFZ2EPCGZCJ/xR+aX2V7GdMndIl/6kYIw5/eib1SFuF9AZLvIPSFuE1FnGo8+vT0pr++SSbhYg==", "requires": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1" + "@jimp/utils": "^0.16.2" } }, "@jimp/png": { - "version": "0.16.1", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/png/-/png-0.16.2.tgz", + "integrity": "sha512-sFOtOSz/tzDwXEChFQ/Nxe+0+vG3Tj0eUxnZVDUG/StXE9dI8Bqmwj3MIa0EgK5s+QG3YlnDOmlPUa4JqmeYeQ==", "requires": { "@babel/runtime": "^7.7.2", - "@jimp/utils": "^0.16.1", + "@jimp/utils": "^0.16.2", "pngjs": "^3.3.3" } }, "@jimp/tiff": { - "version": "0.16.1", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/tiff/-/tiff-0.16.2.tgz", + "integrity": "sha512-ADcdqmtZF+U2YoaaHTzFX8D6NFpmN4WZUT0BPMerEuY7Cq8QoLYU22z2h034FrVW+Rbi1b3y04sB9iDiQAlf2w==", "requires": { "@babel/runtime": "^7.7.2", "utif": "^2.0.1" } }, "@jimp/types": { - "version": "0.16.1", + "version": "0.16.2", + "resolved": 
"https://registry.npmjs.org/@jimp/types/-/types-0.16.2.tgz", + "integrity": "sha512-0Ue5Sq0XnDF6TirisWv5E+8uOnRcd8vRLuwocJOhF76NIlcQrz+5r2k2XWKcr3d+11n28dHLXW5TKSqrUopxhA==", "requires": { "@babel/runtime": "^7.7.2", - "@jimp/bmp": "^0.16.1", - "@jimp/gif": "^0.16.1", - "@jimp/jpeg": "^0.16.1", - "@jimp/png": "^0.16.1", - "@jimp/tiff": "^0.16.1", + "@jimp/bmp": "^0.16.2", + "@jimp/gif": "^0.16.2", + "@jimp/jpeg": "^0.16.2", + "@jimp/png": "^0.16.2", + "@jimp/tiff": "^0.16.2", "timm": "^1.6.1" } }, "@jimp/utils": { - "version": "0.16.1", + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@jimp/utils/-/utils-0.16.2.tgz", + "integrity": "sha512-XENrPvmigiXZQ8E2nxJqO6UVvWBLzbNwyYi3Y8Q1IECoYhYI3kgOQ0fmy4G269Vz1V0omh1bNmC42r4OfXg1Jg==", "requires": { "@babel/runtime": "^7.7.2", "regenerator-runtime": "^0.13.3" @@ -21209,32 +25911,67 @@ }, "@jridgewell/gen-mapping": { "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz", + "integrity": "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==", "requires": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "@jridgewell/resolve-uri": { - "version": "3.0.6" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==" }, "@jridgewell/set-array": { - "version": "1.1.0" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", + "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==" + }, + "@jridgewell/source-map": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz", + "integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==", + "requires": { + "@jridgewell/gen-mapping": "^0.3.0", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "dependencies": { + "@jridgewell/gen-mapping": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", + "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", + "requires": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + } + } + } }, "@jridgewell/sourcemap-codec": { - "version": "1.4.11" + "version": "1.4.14", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", + "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==" }, "@jridgewell/trace-mapping": { - "version": "0.3.9", + "version": "0.3.17", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz", + "integrity": "sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==", "requires": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" + "@jridgewell/resolve-uri": "3.1.0", + "@jridgewell/sourcemap-codec": "1.4.14" } }, "@leichtgewicht/ip-codec": { - "version": "2.0.3" + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz", + "integrity": 
"sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==" }, "@mdx-js/mdx": { "version": "1.6.22", + "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-1.6.22.tgz", + "integrity": "sha512-AMxuLxPz2j5/6TpF/XSdKpQP1NlG0z11dFOlq+2IP/lSgl11GY8ji6S/rgsViN/L0BDvHvUMruRb7ub+24LUYA==", "requires": { "@babel/core": "7.12.9", "@babel/plugin-syntax-jsx": "7.12.1", @@ -21259,6 +25996,8 @@ "dependencies": { "@babel/core": { "version": "7.12.9", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz", + "integrity": "sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==", "requires": { "@babel/code-frame": "^7.10.4", "@babel/generator": "^7.12.5", @@ -21280,47 +26019,67 @@ }, "@babel/plugin-syntax-jsx": { "version": "7.12.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz", + "integrity": "sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg==", "requires": { "@babel/helper-plugin-utils": "^7.10.4" } }, "semver": { - "version": "5.7.1" + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" }, "source-map": { - "version": "0.5.7" + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==" } } }, "@mdx-js/react": { "version": "1.6.22", + "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz", + "integrity": "sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg==", "requires": {} }, "@mdx-js/util": { - "version": "1.6.22" + "version": "1.6.22", + "resolved": "https://registry.npmjs.org/@mdx-js/util/-/util-1.6.22.tgz", + "integrity": "sha512-H1rQc1ZOHANWBvPcW+JpGwr+juXSxM8Q8YCkm3GhZd8REu1fHR3z99CErO1p9pkcfcxZnMdIZdIsXkOHY0NilA==" }, "@nodelib/fs.scandir": { "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "requires": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "@nodelib/fs.stat": { - "version": "2.0.5" + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==" }, "@nodelib/fs.walk": { "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", "requires": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "@polka/url": { - "version": "1.0.0-next.21" + "version": "1.0.0-next.21", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz", + "integrity": "sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==" }, "@redocly/ajv": { - "version": "8.6.4", + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/@redocly/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-9GWx27t7xWhDIR02PA18nzBdLcKQRgc46xNQvjFkrYk4UOmvKhJ/dawwiX0cCOeetN5LcaaiqQbVOWYK62SGHw==", "requires": { "fast-deep-equal": "^3.1.1", 
"json-schema-traverse": "^1.0.0", @@ -21329,46 +26088,83 @@ } }, "@redocly/openapi-core": { - "version": "1.0.0-beta.94", + "version": "1.0.0-beta.111", + "resolved": "https://registry.npmjs.org/@redocly/openapi-core/-/openapi-core-1.0.0-beta.111.tgz", + "integrity": "sha512-t3dwM+IpQWisFyVvcgd7x9LvIv3B2uQOrfyHoQcEIERW3wsouvX98Vh/qhCh+xAXXXpH8Sh+k4Cz2Qn4ei9VAw==", "requires": { - "@redocly/ajv": "^8.6.4", + "@redocly/ajv": "^8.11.0", "@types/node": "^14.11.8", "colorette": "^1.2.0", "js-levenshtein": "^1.1.6", "js-yaml": "^4.1.0", "lodash.isequal": "^4.5.0", - "minimatch": "^3.0.4", + "minimatch": "^5.0.1", "node-fetch": "^2.6.1", "pluralize": "^8.0.0", "yaml-ast-parser": "0.0.43" }, "dependencies": { "@types/node": { - "version": "14.18.16" + "version": "14.18.32", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.32.tgz", + "integrity": "sha512-Y6S38pFr04yb13qqHf8uk1nHE3lXgQ30WZbv1mLliV9pt0NjvqdWttLcrOYLnXbOafknVYRHZGoMSpR9UwfYow==" + }, + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "requires": { + "balanced-match": "^1.0.0" + } + }, + "colorette": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.4.0.tgz", + "integrity": "sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g==" + }, + "minimatch": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.0.tgz", + "integrity": "sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==", + "requires": { + "brace-expansion": "^2.0.1" + } } } }, - "@redocly/react-dropdown-aria": { - "version": "2.0.12", - "requires": {} - }, "@sideway/address": { "version": "4.1.4", + "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz", + "integrity": "sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw==", "requires": { "@hapi/hoek": "^9.0.0" } }, "@sideway/formula": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.0.tgz", + "integrity": "sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg==" }, "@sideway/pinpoint": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz", + "integrity": "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==" + }, + "@sinclair/typebox": { + "version": "0.24.47", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.24.47.tgz", + "integrity": "sha512-J4Xw0xYK4h7eC34MNOPQi6IkNxGRck6n4VJpWDzXIFVTW8I/D43Gf+NfWz/v/7NHlzWOPd3+T4PJ4OqklQ2u7A==", + "dev": true }, "@sindresorhus/is": { - "version": "0.14.0" + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", + "integrity": "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==" }, "@sinonjs/commons": { "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", + "integrity": "sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==", "dev": true, "requires": { "type-detect": "4.0.8" @@ -21376,218 +26172,347 @@ }, "@sinonjs/fake-timers": { 
"version": "8.1.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz", + "integrity": "sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg==", "dev": true, "requires": { "@sinonjs/commons": "^1.7.0" } }, "@slorber/static-site-generator-webpack-plugin": { - "version": "4.0.4", + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz", + "integrity": "sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA==", "requires": { - "bluebird": "^3.7.1", - "cheerio": "^0.22.0", "eval": "^0.1.8", - "webpack-sources": "^1.4.3" - }, - "dependencies": { - "cheerio": { - "version": "0.22.0", - "requires": { - "css-select": "~1.2.0", - "dom-serializer": "~0.1.0", - "entities": "~1.1.1", - "htmlparser2": "^3.9.1", - "lodash.assignin": "^4.0.9", - "lodash.bind": "^4.1.4", - "lodash.defaults": "^4.0.1", - "lodash.filter": "^4.4.0", - "lodash.flatten": "^4.2.0", - "lodash.foreach": "^4.3.0", - "lodash.map": "^4.4.0", - "lodash.merge": "^4.4.0", - "lodash.pick": "^4.2.1", - "lodash.reduce": "^4.4.0", - "lodash.reject": "^4.4.0", - "lodash.some": "^4.4.0" - } - }, - "css-select": { - "version": "1.2.0", - "requires": { - "boolbase": "~1.0.0", - "css-what": "2.1", - "domutils": "1.5.1", - "nth-check": "~1.0.1" - } - }, - "css-what": { - "version": "2.1.3" - }, - "dom-serializer": { - "version": "0.1.1", - "requires": { - "domelementtype": "^1.3.0", - "entities": "^1.1.1" - } - }, - "domelementtype": { - "version": "1.3.1" - }, - "domhandler": { - "version": "2.4.2", - "requires": { - "domelementtype": "1" - } - }, - "domutils": { - "version": "1.5.1", - "requires": { - "dom-serializer": "0", - "domelementtype": "1" - } - }, - "entities": { - "version": "1.1.2" - }, - "htmlparser2": { - "version": "3.10.1", - "requires": { - "domelementtype": "^1.3.1", - "domhandler": "^2.3.0", - "domutils": "^1.5.1", - "entities": "^1.1.1", - "inherits": "^2.0.1", - "readable-stream": "^3.1.1" - } - }, - "nth-check": { - "version": "1.0.2", - "requires": { - "boolbase": "~1.0.0" - } - } + "p-map": "^4.0.0", + "webpack-sources": "^3.2.2" } }, "@svgr/babel-plugin-add-jsx-attribute": { - "version": "5.4.0" + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.5.0.tgz", + "integrity": "sha512-Cp1JR1IPrQNvPRbkfcPmax52iunBC+eQDyBce8feOIIbVH6ZpVhErYoJtPWRBj2rKi4Wi9HvCm1+L1UD6QlBmg==", + "requires": {} }, "@svgr/babel-plugin-remove-jsx-attribute": { - "version": "5.4.0" + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-6.5.0.tgz", + "integrity": "sha512-8zYdkym7qNyfXpWvu4yq46k41pyNM9SOstoWhKlm+IfdCE1DdnRKeMUPsWIEO/DEkaWxJ8T9esNdG3QwQ93jBA==", + "requires": {} }, "@svgr/babel-plugin-remove-jsx-empty-expression": { - "version": "5.0.1" + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-6.5.0.tgz", + "integrity": "sha512-NFdxMq3xA42Kb1UbzCVxplUc0iqSyM9X8kopImvFnB+uSDdzIHOdbs1op8ofAvVRtbg4oZiyRl3fTYeKcOe9Iw==", + "requires": {} }, "@svgr/babel-plugin-replace-jsx-attribute-value": { - "version": "5.0.1" + "version": "6.5.0", + "resolved": 
"https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.5.0.tgz", + "integrity": "sha512-XWm64/rSPUCQ+MFyA9lhMO+w8bOZvkTvovRIU1lpIy63ysPaVAFtxjQiZj+S7QaLaLGUXkSkf8WZsaN+QPo/gA==", + "requires": {} }, "@svgr/babel-plugin-svg-dynamic-title": { - "version": "5.4.0" + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.5.0.tgz", + "integrity": "sha512-JIF2D2ltiWFGlTw2fJ9jJg1fNT9rWjOD2Cf0/xzeW6Z2LIRQTHcRHxpZq359+SRWtEPsCXEWV2Xmd+DMBj6dBw==", + "requires": {} }, "@svgr/babel-plugin-svg-em-dimensions": { - "version": "5.4.0" + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.5.0.tgz", + "integrity": "sha512-uuo0FfLP4Nu2zncOcoUFDzZdXWma2bxkTGk0etRThs4/PghvPIGaW8cPhCg6yJ8zpaauWcKV0wZtzKlJRCtVzg==", + "requires": {} }, "@svgr/babel-plugin-transform-react-native-svg": { - "version": "5.4.0" + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.5.0.tgz", + "integrity": "sha512-VMRWyOmrV+DaEFPgP3hZMsFgs2g87ojs3txw0Rx8iz6Nf/E3UoHUwTqpkSCWd3Hsnc9gMOY9+wl6+/Ycleh1sw==", + "requires": {} }, "@svgr/babel-plugin-transform-svg-component": { - "version": "5.5.0" + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.5.0.tgz", + "integrity": "sha512-b67Ul3SelaqvGEEG/1B3VJ03KUtGFgRQjRLCCjdttMQLcYa9l/izQFEclNFx53pNqhijUMNKHPhGMY/CWGVKig==", + "requires": {} }, "@svgr/babel-preset": { - "version": "5.5.0", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-6.5.0.tgz", + "integrity": "sha512-UWM98PKVuMqw2UZo8YO3erI6nF1n7/XBYTXBqR0QhZP7HTjYK6QxFNvPfIshddy1hBdzhVpkf148Vg8xiVOtyg==", "requires": { - "@svgr/babel-plugin-add-jsx-attribute": "^5.4.0", - "@svgr/babel-plugin-remove-jsx-attribute": "^5.4.0", - "@svgr/babel-plugin-remove-jsx-empty-expression": "^5.0.1", - "@svgr/babel-plugin-replace-jsx-attribute-value": "^5.0.1", - "@svgr/babel-plugin-svg-dynamic-title": "^5.4.0", - "@svgr/babel-plugin-svg-em-dimensions": "^5.4.0", - "@svgr/babel-plugin-transform-react-native-svg": "^5.4.0", - "@svgr/babel-plugin-transform-svg-component": "^5.5.0" + "@svgr/babel-plugin-add-jsx-attribute": "^6.5.0", + "@svgr/babel-plugin-remove-jsx-attribute": "^6.5.0", + "@svgr/babel-plugin-remove-jsx-empty-expression": "^6.5.0", + "@svgr/babel-plugin-replace-jsx-attribute-value": "^6.5.0", + "@svgr/babel-plugin-svg-dynamic-title": "^6.5.0", + "@svgr/babel-plugin-svg-em-dimensions": "^6.5.0", + "@svgr/babel-plugin-transform-react-native-svg": "^6.5.0", + "@svgr/babel-plugin-transform-svg-component": "^6.5.0" } }, "@svgr/core": { - "version": "5.5.0", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/core/-/core-6.5.0.tgz", + "integrity": "sha512-jIbu36GMjfK8HCCQitkfVVeQ2vSXGfq0ef0GO9HUxZGjal6Kvpkk4PwpkFP+OyCzF+skQFT9aWrUqekT3pKF8w==", "requires": { - "@svgr/plugin-jsx": "^5.5.0", + "@babel/core": "^7.18.5", + "@svgr/babel-preset": "^6.5.0", + "@svgr/plugin-jsx": "^6.5.0", "camelcase": "^6.2.0", - "cosmiconfig": "^7.0.0" + "cosmiconfig": "^7.0.1" } }, "@svgr/hast-util-to-babel-ast": { - "version": "5.5.0", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.5.0.tgz", + "integrity": 
"sha512-PPy94U/EiPQ2dY0b4jEqj4QOdDRq6DG7aTHjpGaL8HlKSHkpU1DpjfywCXTJqtOdCo2FywjWvg0U2FhqMeUJaA==", "requires": { - "@babel/types": "^7.12.6" + "@babel/types": "^7.18.4", + "entities": "^4.3.0" } }, "@svgr/plugin-jsx": { - "version": "5.5.0", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-6.5.0.tgz", + "integrity": "sha512-1CHMqOBKoNk/ZPU+iGXKcQPC6q9zaD7UOI99J+BaGY5bdCztcf5bZyi0QZSDRJtCQpdofeVv7XfBYov2mtl0Pw==", "requires": { - "@babel/core": "^7.12.3", - "@svgr/babel-preset": "^5.5.0", - "@svgr/hast-util-to-babel-ast": "^5.5.0", - "svg-parser": "^2.0.2" + "@babel/core": "^7.18.5", + "@svgr/babel-preset": "^6.5.0", + "@svgr/hast-util-to-babel-ast": "^6.5.0", + "svg-parser": "^2.0.4" } }, "@svgr/plugin-svgo": { - "version": "5.5.0", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-6.5.0.tgz", + "integrity": "sha512-8Zv1Yyv6I7HlIqrqGFM0sDKQrhjbfNZJawR8UjIaVWSb0tKZP1Ra6ymhqIFu6FT6kDRD0Ct5NlQZ10VUujSspw==", "requires": { - "cosmiconfig": "^7.0.0", + "cosmiconfig": "^7.0.1", "deepmerge": "^4.2.2", - "svgo": "^1.2.2" + "svgo": "^2.8.0" } }, "@svgr/webpack": { - "version": "5.5.0", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@svgr/webpack/-/webpack-6.5.0.tgz", + "integrity": "sha512-rM/Z4pwMhqvAXEHoHIlE4SeTb0ToQNmJuBdiHwhP2ZtywyX6XqrgCv2WX7K/UCgNYJgYbekuylgyjnuLUHTcZQ==", "requires": { - "@babel/core": "^7.12.3", - "@babel/plugin-transform-react-constant-elements": "^7.12.1", - "@babel/preset-env": "^7.12.1", - "@babel/preset-react": "^7.12.5", - "@svgr/core": "^5.5.0", - "@svgr/plugin-jsx": "^5.5.0", - "@svgr/plugin-svgo": "^5.5.0", - "loader-utils": "^2.0.0" + "@babel/core": "^7.18.5", + "@babel/plugin-transform-react-constant-elements": "^7.17.12", + "@babel/preset-env": "^7.18.2", + "@babel/preset-react": "^7.17.12", + "@babel/preset-typescript": "^7.17.12", + "@svgr/core": "^6.5.0", + "@svgr/plugin-jsx": "^6.5.0", + "@svgr/plugin-svgo": "^6.5.0" } }, "@swc/core": { - "version": "1.2.174", + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.9.tgz", + "integrity": "sha512-PCRCO9vIoEX3FyS3z/FkWVYJzuspUq0LLaWdK3L30+KQDtH29K+LQdRc2Dzin2MU5MpY4bSHydAwl9M6cmZ9OA==", + "dev": true, + "requires": { + "@swc/core-android-arm-eabi": "1.3.9", + "@swc/core-android-arm64": "1.3.9", + "@swc/core-darwin-arm64": "1.3.9", + "@swc/core-darwin-x64": "1.3.9", + "@swc/core-freebsd-x64": "1.3.9", + "@swc/core-linux-arm-gnueabihf": "1.3.9", + "@swc/core-linux-arm64-gnu": "1.3.9", + "@swc/core-linux-arm64-musl": "1.3.9", + "@swc/core-linux-x64-gnu": "1.3.9", + "@swc/core-linux-x64-musl": "1.3.9", + "@swc/core-win32-arm64-msvc": "1.3.9", + "@swc/core-win32-ia32-msvc": "1.3.9", + "@swc/core-win32-x64-msvc": "1.3.9" + } + }, + "@swc/core-android-arm-eabi": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-android-arm-eabi/-/core-android-arm-eabi-1.3.9.tgz", + "integrity": "sha512-+F+sU2l49Po4tJoNtIpFwt0k1sspymvPMM+DCpnkHF1idzRiOU5NGgVzmLDjoO9AnxHa7EBJ3itN+PP2Dd06+A==", + "dev": true, + "optional": true, + "requires": { + "@swc/wasm": "1.2.122" + } + }, + "@swc/core-android-arm64": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-android-arm64/-/core-android-arm64-1.3.9.tgz", + "integrity": "sha512-HSWdex3yd4CRefkM2WVz0nTKjpirNZnwSlghqe4ct9QAYGMiiPesYgWPAnq/PpnYfmjQse4yvEclamGiek6zDA==", "dev": true, + "optional": true, "requires": { - "@swc/core-android-arm-eabi": "1.2.174", - "@swc/core-android-arm64": "1.2.174", - 
"@swc/core-darwin-arm64": "1.2.174", - "@swc/core-darwin-x64": "1.2.174", - "@swc/core-freebsd-x64": "1.2.174", - "@swc/core-linux-arm-gnueabihf": "1.2.174", - "@swc/core-linux-arm64-gnu": "1.2.174", - "@swc/core-linux-arm64-musl": "1.2.174", - "@swc/core-linux-x64-gnu": "1.2.174", - "@swc/core-linux-x64-musl": "1.2.174", - "@swc/core-win32-arm64-msvc": "1.2.174", - "@swc/core-win32-ia32-msvc": "1.2.174", - "@swc/core-win32-x64-msvc": "1.2.174" + "@swc/wasm": "1.2.130" + }, + "dependencies": { + "@swc/wasm": { + "version": "1.2.130", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.130.tgz", + "integrity": "sha512-rNcJsBxS70+pv8YUWwf5fRlWX6JoY/HJc25HD/F8m6Kv7XhJdqPPMhyX6TKkUBPAG7TWlZYoxa+rHAjPy4Cj3Q==", + "dev": true, + "optional": true + } } }, "@swc/core-darwin-arm64": { - "version": "1.2.174", + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.9.tgz", + "integrity": "sha512-E7WJY1LsMJtOtUYc/JXl8qlt6USnzodWmdO1eAAOSAODEdX9AjgG3fRT94o3UcmvMrto7sxBXVExj8wG7Cxeng==", + "dev": true, + "optional": true + }, + "@swc/core-darwin-x64": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.9.tgz", + "integrity": "sha512-0+dFCAcLEBxwIO+0Nt+OT8mjPpvBMBWIuFWB1DNiUu2K73+OB0i+llzsCJFoasISHR+YJD0bGyv+8AtVuUdFAw==", + "dev": true, + "optional": true + }, + "@swc/core-freebsd-x64": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-freebsd-x64/-/core-freebsd-x64-1.3.9.tgz", + "integrity": "sha512-JbHIeklQPRBEZUfKAKt/IB/ayi7dJZ9tEGu/fDxNfk8Znu1Md+YOKRyN5FPMXfYrL5yFUXnlFOb2LX6wjNhhjQ==", + "dev": true, + "optional": true, + "requires": { + "@swc/wasm": "1.2.130" + }, + "dependencies": { + "@swc/wasm": { + "version": "1.2.130", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.130.tgz", + "integrity": "sha512-rNcJsBxS70+pv8YUWwf5fRlWX6JoY/HJc25HD/F8m6Kv7XhJdqPPMhyX6TKkUBPAG7TWlZYoxa+rHAjPy4Cj3Q==", + "dev": true, + "optional": true + } + } + }, + "@swc/core-linux-arm-gnueabihf": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.9.tgz", + "integrity": "sha512-Yc1G8FGXmq6yGKtu5wYCcvVWBtqU0/3FUk6zJM+7pFiivKsVHJcgWrkgLO1u6h7bgEdQIYwfM3/BbRNE5CtdnA==", + "dev": true, + "optional": true, + "requires": { + "@swc/wasm": "1.2.130" + }, + "dependencies": { + "@swc/wasm": { + "version": "1.2.130", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.130.tgz", + "integrity": "sha512-rNcJsBxS70+pv8YUWwf5fRlWX6JoY/HJc25HD/F8m6Kv7XhJdqPPMhyX6TKkUBPAG7TWlZYoxa+rHAjPy4Cj3Q==", + "dev": true, + "optional": true + } + } + }, + "@swc/core-linux-arm64-gnu": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.9.tgz", + "integrity": "sha512-PrBjmPIMhoQLCpfaZl2b1cCXnaNPddQB/ssMVqQ6eXChBJfcv14M5BjxtI2ORi4HoEDlsbX+k50sL666M3lnBw==", + "dev": true, + "optional": true + }, + "@swc/core-linux-arm64-musl": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.9.tgz", + "integrity": "sha512-jJT56vt81o2N3O2nXp+MZGM6mbgkNx6lvvRT6yISW29fLM6NHBXmkGcjaWOD9VFJDRmu/MtFxbElPxr6ikrFYQ==", + "dev": true, + "optional": true + }, + "@swc/core-linux-x64-gnu": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.9.tgz", + "integrity": 
"sha512-60ZreTvrJk3N7xvPzQeQJDePsXUmSUZkKD6lc0xzug4bv53NyUIQ8gH8nzVsV++D9NZeVxXp6WqqFLcgt7yEDQ==", + "dev": true, + "optional": true + }, + "@swc/core-linux-x64-musl": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.9.tgz", + "integrity": "sha512-UBApPfUSP+w6ye6V1oT4EGh3LFCFrZaQsC1CkTuiYXXSmQMzkYE0Jzegn3R7MHWCJSneRwXRTKrkdhrNBUqWKA==", + "dev": true, + "optional": true + }, + "@swc/core-win32-arm64-msvc": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.9.tgz", + "integrity": "sha512-4FQSalXbbnqTLVGRljRnw/bJ99Jwj1WnXz/aJM/SVL8S9Zbc82+3v+wXL/9NGwaAndu2QUkb2KPYNAHvB7PCdw==", + "dev": true, + "optional": true, + "requires": { + "@swc/wasm": "1.2.130" + }, + "dependencies": { + "@swc/wasm": { + "version": "1.2.130", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.130.tgz", + "integrity": "sha512-rNcJsBxS70+pv8YUWwf5fRlWX6JoY/HJc25HD/F8m6Kv7XhJdqPPMhyX6TKkUBPAG7TWlZYoxa+rHAjPy4Cj3Q==", + "dev": true, + "optional": true + } + } + }, + "@swc/core-win32-ia32-msvc": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.9.tgz", + "integrity": "sha512-ZkTw1Cm+b2QBf/NjkJJbocvgT0NWdfPQL0OyMkuTAinRzfrMmq/lmshjnqj3ysFVeI4uuJTNemiT6mivpLmuBw==", + "dev": true, + "optional": true, + "requires": { + "@swc/wasm": "1.2.130" + }, + "dependencies": { + "@swc/wasm": { + "version": "1.2.130", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.130.tgz", + "integrity": "sha512-rNcJsBxS70+pv8YUWwf5fRlWX6JoY/HJc25HD/F8m6Kv7XhJdqPPMhyX6TKkUBPAG7TWlZYoxa+rHAjPy4Cj3Q==", + "dev": true, + "optional": true + } + } + }, + "@swc/core-win32-x64-msvc": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.9.tgz", + "integrity": "sha512-moKi2prCKzYnXXlrLf5nwAN4uGSm4YpsW2xzYiZWJJDRqu74VoUWoDkG25jalHTfN/PSBQg4dkFWhhUe89JJVw==", "dev": true, "optional": true }, "@swc/jest": { - "version": "0.2.20", + "version": "0.2.23", + "resolved": "https://registry.npmjs.org/@swc/jest/-/jest-0.2.23.tgz", + "integrity": "sha512-ZLj17XjHbPtNsgqjm83qizENw05emLkKGu3WuPUttcy9hkngl0/kcc7fDbcSBpADS0GUtsO+iKPjZFWVAtJSlA==", "dev": true, "requires": { - "@jest/create-cache-key-function": "^27.4.2" + "@jest/create-cache-key-function": "^27.4.2", + "jsonc-parser": "^3.2.0" } }, + "@swc/wasm": { + "version": "1.2.122", + "resolved": "https://registry.npmjs.org/@swc/wasm/-/wasm-1.2.122.tgz", + "integrity": "sha512-sM1VCWQxmNhFtdxME+8UXNyPNhxNu7zdb6ikWpz0YKAQQFRGT5ThZgJrubEpah335SUToNg8pkdDF7ibVCjxbQ==", + "dev": true, + "optional": true + }, "@szmarczak/http-timer": { "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz", + "integrity": "sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==", "requires": { "defer-to-connect": "^1.0.1" } }, "@testing-library/dom": { - "version": "8.13.0", + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.19.0.tgz", + "integrity": "sha512-6YWYPPpxG3e/xOo6HIWwB/58HukkwIVTOaZ0VwdMVjhRUX/01E4FtQbck9GazOOj7MXHc5RBzMrU86iBJHbI+A==", "dev": true, "requires": { "@babel/code-frame": "^7.10.4", @@ -21602,6 +26527,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -21609,6 +26536,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -21617,6 +26546,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -21624,14 +26555,20 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -21640,14 +26577,16 @@ } }, "@testing-library/jest-dom": { - "version": "5.16.4", + "version": "5.16.5", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-5.16.5.tgz", + "integrity": "sha512-N5ixQ2qKpi5OLYfwQmUb/5mSV9LneAcaUfp32pn4yCnpb8r/Yz0pXFPck21dIicKmi+ta5WRAknkZCfA8refMA==", "dev": true, "requires": { + "@adobe/css-tools": "^4.0.1", "@babel/runtime": "^7.9.2", "@types/testing-library__jest-dom": "^5.9.1", "aria-query": "^5.0.0", "chalk": "^3.0.0", - "css": "^3.0.0", "css.escape": "^1.5.1", "dom-accessibility-api": "^0.5.6", "lodash": "^4.17.15", @@ -21656,6 +26595,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -21663,6 +26604,8 @@ }, "chalk": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -21671,6 +26614,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -21678,14 +26623,20 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -21695,6 +26646,8 @@ }, "@testing-library/react": { "version": "12.1.5", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-12.1.5.tgz", + "integrity": "sha512-OfTXCJUFgjd/digLUuPxa0+/3ZxsQmE7ub9kcbW/wi96Bh3o/p5vrETcBGfP17NWPGqeYYl5LTRpwyGoMC4ysg==", "dev": true, "requires": { "@babel/runtime": "^7.12.5", @@ -21704,6 +26657,8 @@ }, "@testing-library/user-event": { "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-13.5.0.tgz", + "integrity": "sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg==", "dev": true, "requires": { "@babel/runtime": "^7.12.5" @@ -21711,17 +26666,25 @@ }, "@tootallnate/once": { "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", "dev": true }, "@trysound/sax": { - "version": "0.2.0" + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", + "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==" }, "@types/aria-query": { "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-4.2.2.tgz", + "integrity": "sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig==", "dev": true }, "@types/babel__core": { "version": "7.1.19", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.19.tgz", + "integrity": "sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw==", "dev": true, "requires": { "@babel/parser": "^7.1.0", @@ -21733,6 +26696,8 @@ }, "@types/babel__generator": { "version": "7.6.4", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.4.tgz", + "integrity": "sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==", "dev": true, "requires": { "@babel/types": "^7.0.0" @@ -21740,6 +26705,8 @@ }, "@types/babel__template": { "version": "7.4.1", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.1.tgz", + "integrity": "sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==", "dev": true, "requires": { "@babel/parser": "^7.1.0", @@ -21747,7 +26714,9 @@ } }, "@types/babel__traverse": { - "version": "7.17.1", + "version": "7.18.2", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.18.2.tgz", + "integrity": "sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg==", "dev": true, "requires": { "@babel/types": "^7.3.0" @@ -21755,6 +26724,8 @@ }, "@types/body-parser": { "version": "1.19.2", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", + "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", "requires": { "@types/connect": "*", "@types/node": "*" @@ -21762,42 
+26733,56 @@ }, "@types/bonjour": { "version": "3.5.10", + "resolved": "https://registry.npmjs.org/@types/bonjour/-/bonjour-3.5.10.tgz", + "integrity": "sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw==", "requires": { "@types/node": "*" } }, "@types/connect": { "version": "3.4.35", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", + "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", "requires": { "@types/node": "*" } }, "@types/connect-history-api-fallback": { "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz", + "integrity": "sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw==", "requires": { "@types/express-serve-static-core": "*", "@types/node": "*" } }, "@types/eslint": { - "version": "8.4.1", + "version": "8.4.6", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.6.tgz", + "integrity": "sha512-/fqTbjxyFUaYNO7VcW5g+4npmqVACz1bB7RTHYuLj+PRjw9hrCwrUXVQFpChUS0JsyEFvMZ7U/PfmvWgxJhI9g==", "requires": { "@types/estree": "*", "@types/json-schema": "*" } }, "@types/eslint-scope": { - "version": "3.7.3", + "version": "3.7.4", + "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.4.tgz", + "integrity": "sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA==", "requires": { "@types/eslint": "*", "@types/estree": "*" } }, "@types/estree": { - "version": "0.0.51" + "version": "0.0.51", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz", + "integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==" }, "@types/express": { - "version": "4.17.13", + "version": "4.17.14", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.14.tgz", + "integrity": "sha512-TEbt+vaPFQ+xpxFLFssxUDXj5cWCxZJjIcB7Yg0k0GMHGtgtQgpvx/MUQUeAkNbA9AAGrwkAsoeItdTgS7FMyg==", "requires": { "@types/body-parser": "*", "@types/express-serve-static-core": "^4.17.18", @@ -21806,7 +26791,9 @@ } }, "@types/express-serve-static-core": { - "version": "4.17.28", + "version": "4.17.31", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.31.tgz", + "integrity": "sha512-DxMhY+NAsTwMMFHBTtJFNp5qiHKJ7TeqOo23zVEM9alT1Ml27Q3xcTH0xwxn7Q0BbMcVEJOs/7aQtUWupUQN3Q==", "requires": { "@types/node": "*", "@types/qs": "*", @@ -21815,6 +26802,8 @@ }, "@types/graceful-fs": { "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.5.tgz", + "integrity": "sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==", "dev": true, "requires": { "@types/node": "*" @@ -21822,28 +26811,40 @@ }, "@types/hast": { "version": "2.3.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.4.tgz", + "integrity": "sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==", "requires": { "@types/unist": "*" } }, "@types/history": { - "version": "4.7.11" + "version": "4.7.11", + "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.11.tgz", + "integrity": "sha512-qjDJRrmvBMiTx+jyLxvLfJU7UznFuokDv4f3WRuriHKERccVpFU+8XMQUAbDzoiJCsmexxRExQeMwwCdamSKDA==" }, "@types/html-minifier-terser": { - "version": "6.1.0" + "version": "6.1.0", + 
"resolved": "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", + "integrity": "sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==" }, "@types/http-proxy": { - "version": "1.17.8", + "version": "1.17.9", + "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.9.tgz", + "integrity": "sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw==", "requires": { "@types/node": "*" } }, "@types/istanbul-lib-coverage": { "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", "dev": true }, "@types/istanbul-lib-report": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", "dev": true, "requires": { "@types/istanbul-lib-coverage": "*" @@ -21851,61 +26852,110 @@ }, "@types/istanbul-reports": { "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", "dev": true, "requires": { "@types/istanbul-lib-report": "*" } }, "@types/jest": { - "version": "27.4.1", + "version": "29.2.0", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.2.0.tgz", + "integrity": "sha512-KO7bPV21d65PKwv3LLsD8Jn3E05pjNjRZvkm+YTacWhVmykAb07wW6IkZUmQAltwQafNcDUEUrMO2h3jeBSisg==", "dev": true, "requires": { - "jest-matcher-utils": "^27.0.0", - "pretty-format": "^27.0.0" + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true + }, + "pretty-format": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.2.1.tgz", + "integrity": "sha512-Y41Sa4aLCtKAXvwuIpTvcFBkyeYp2gdFWzXGA+ZNES3VwURIB165XO/z7CjETwzCCS53MjW/rLMyyqEnTtaOfA==", + "dev": true, + "requires": { + "@jest/schemas": "^29.0.0", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + } + } } }, "@types/json-schema": { - "version": "7.0.11" + "version": "7.0.11", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==" + }, + "@types/katex": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.11.1.tgz", + "integrity": "sha512-DUlIj2nk0YnJdlWgsFuVKcX27MLW0KbKmGVoUHmFr+74FYYNUDAaj9ZqTADvsbE8rfxuVmSFc7KczYn5Y09ozg==" }, "@types/lodash": { - "version": "4.14.182" + "version": "4.14.186", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.186.tgz", + "integrity": "sha512-eHcVlLXP0c2FlMPm56ITode2AgLMSa6aJ05JTTbYbI+7EMkCEE5qk2E41d5g2lCVTqRe0GnnRFurmlCsDODrPw==" }, "@types/mdast": { "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz", + "integrity": "sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==", 
"requires": { "@types/unist": "*" } }, "@types/mime": { - "version": "1.3.2" + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-3.0.1.tgz", + "integrity": "sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==" }, "@types/node": { - "version": "17.0.31" + "version": "18.11.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", + "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==" }, "@types/parse-json": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" }, "@types/parse5": { - "version": "5.0.3" + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-5.0.3.tgz", + "integrity": "sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw==" }, "@types/prettier": { - "version": "2.6.0", + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.1.tgz", + "integrity": "sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow==", "dev": true }, "@types/prop-types": { - "version": "15.7.5" - }, - "@types/q": { - "version": "1.5.5" + "version": "15.7.5", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", + "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" }, "@types/qs": { - "version": "6.9.7" + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" }, "@types/range-parser": { - "version": "1.2.4" + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", + "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" }, "@types/react": { - "version": "17.0.44", + "version": "18.0.21", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.0.21.tgz", + "integrity": "sha512-7QUCOxvFgnD5Jk8ZKlUAhVcRj7GuJRjnjjiY/IUBWKgOlnvDvTMLD4RTF7NPyVmbRhNrbomZiOepg7M/2Kj1mA==", "requires": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -21913,14 +26963,31 @@ } }, "@types/react-dom": { - "version": "17.0.16", + "version": "17.0.17", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.17.tgz", + "integrity": "sha512-VjnqEmqGnasQKV0CWLevqMTXBYG9GbwuE6x3VetERLh0cq2LTptFE73MrQi2S7GkKXCf2GgwItB/melLnxfnsg==", "dev": true, "requires": { "@types/react": "^17" + }, + "dependencies": { + "@types/react": { + "version": "17.0.50", + "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.50.tgz", + "integrity": "sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==", + "dev": true, + "requires": { + "@types/prop-types": "*", + "@types/scheduler": "*", + "csstype": "^3.0.2" + } + } } }, "@types/react-router": { - "version": "5.1.18", + "version": "5.1.19", + "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.19.tgz", + "integrity": "sha512-Fv/5kb2STAEMT3wHzdKQK2z8xKq38EDIGVrutYLmQVVLe+4orDFquU52hQrULnEHinMKv9FSA6lf9+uNT1ITtA==", "requires": { "@types/history": "^4.7.11", "@types/react": 
"*" @@ -21928,6 +26995,8 @@ }, "@types/react-router-config": { "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@types/react-router-config/-/react-router-config-5.0.6.tgz", + "integrity": "sha512-db1mx37a1EJDf1XeX8jJN7R3PZABmJQXR8r28yUjVMFSjkmnQo6X6pOEEmNl+Tp2gYQOGPdYbFIipBtdElZ3Yg==", "requires": { "@types/history": "^4.7.11", "@types/react": "*", @@ -21936,6 +27005,8 @@ }, "@types/react-router-dom": { "version": "5.3.3", + "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.3.3.tgz", + "integrity": "sha512-kpqnYK4wcdm5UaWI3fLcELopqLrHgLqNsdpHauzlQktfkHL3npOSwtj1Uz9oKBAzs7lFtVkV8j83voAz2D8fhw==", "requires": { "@types/history": "^4.7.11", "@types/react": "*", @@ -21943,58 +27014,92 @@ } }, "@types/retry": { - "version": "0.12.0" + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==" }, "@types/sax": { "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-pSAff4IAxJjfAXUG6tFkO7dsSbTmf8CtUpfhhZ5VhkRpC4628tJhh3+V6H1E+/Gs9piSzYKT5yzHO5M4GG9jkw==", "requires": { "@types/node": "*" } }, "@types/scheduler": { - "version": "0.16.2" + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", + "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==" }, "@types/serve-index": { "version": "1.9.1", + "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz", + "integrity": "sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg==", "requires": { "@types/express": "*" } }, "@types/serve-static": { - "version": "1.13.10", + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz", + "integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==", "requires": { - "@types/mime": "^1", + "@types/mime": "*", "@types/node": "*" } }, + "@types/sinonjs__fake-timers": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", + "integrity": "sha512-0kSuKjAS0TrGLJ0M/+8MaFkGsQhZpB6pxOmvS3K8FYI72K//YmdfoW9X2qPsAKh1mkwxGD5zib9s1FIFed6E8g==", + "dev": true + }, + "@types/sizzle": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.3.tgz", + "integrity": "sha512-JYM8x9EGF163bEyhdJBpR2QX1R5naCJHC8ucJylJ3w9/CVBaskdQ8WqBf8MmQrd1kRvp/a4TS8HJ+bxzR7ZJYQ==", + "dev": true + }, "@types/sockjs": { "version": "0.3.33", + "resolved": "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.33.tgz", + "integrity": "sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw==", "requires": { "@types/node": "*" } }, "@types/stack-utils": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", + "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", "dev": true }, "@types/testing-library__jest-dom": { - "version": "5.14.3", + "version": "5.14.5", + "resolved": "https://registry.npmjs.org/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.14.5.tgz", + "integrity": 
"sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ==", "dev": true, "requires": { "@types/jest": "*" } }, "@types/unist": { - "version": "2.0.6" + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz", + "integrity": "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==" }, "@types/ws": { "version": "8.5.3", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.3.tgz", + "integrity": "sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w==", "requires": { "@types/node": "*" } }, "@types/yargs": { "version": "16.0.4", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz", + "integrity": "sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw==", "dev": true, "requires": { "@types/yargs-parser": "*" @@ -22002,26 +27107,48 @@ }, "@types/yargs-parser": { "version": "21.0.0", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.0.tgz", + "integrity": "sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==", "dev": true }, + "@types/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-Cn6WYCm0tXv8p6k+A8PvbDG763EDpBoTzHdA+Q/MF6H3sapGjCm9NzoaJncJS9tUKSuCoDs9XHxYYsQDgxR6kw==", + "dev": true, + "optional": true, + "requires": { + "@types/node": "*" + } + }, "@webassemblyjs/ast": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz", + "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==", "requires": { "@webassemblyjs/helper-numbers": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1" } }, "@webassemblyjs/floating-point-hex-parser": { - "version": "1.11.1" + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz", + "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==" }, "@webassemblyjs/helper-api-error": { - "version": "1.11.1" + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz", + "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==" }, "@webassemblyjs/helper-buffer": { - "version": "1.11.1" + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz", + "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==" }, "@webassemblyjs/helper-numbers": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz", + "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==", "requires": { "@webassemblyjs/floating-point-hex-parser": "1.11.1", "@webassemblyjs/helper-api-error": "1.11.1", @@ -22029,10 +27156,14 @@ } }, "@webassemblyjs/helper-wasm-bytecode": { - "version": "1.11.1" + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz", + "integrity": 
"sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==" }, "@webassemblyjs/helper-wasm-section": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz", + "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==", "requires": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", @@ -22042,21 +27173,29 @@ }, "@webassemblyjs/ieee754": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz", + "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==", "requires": { "@xtuc/ieee754": "^1.2.0" } }, "@webassemblyjs/leb128": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz", + "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==", "requires": { "@xtuc/long": "4.2.2" } }, "@webassemblyjs/utf8": { - "version": "1.11.1" + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz", + "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==" }, "@webassemblyjs/wasm-edit": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz", + "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==", "requires": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", @@ -22070,6 +27209,8 @@ }, "@webassemblyjs/wasm-gen": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz", + "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==", "requires": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-wasm-bytecode": "1.11.1", @@ -22080,6 +27221,8 @@ }, "@webassemblyjs/wasm-opt": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz", + "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==", "requires": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-buffer": "1.11.1", @@ -22089,6 +27232,8 @@ }, "@webassemblyjs/wasm-parser": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz", + "integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==", "requires": { "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/helper-api-error": "1.11.1", @@ -22100,33 +27245,47 @@ }, "@webassemblyjs/wast-printer": { "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz", + "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==", "requires": { "@webassemblyjs/ast": "1.11.1", "@xtuc/long": "4.2.2" } }, "@xtuc/ieee754": { - "version": "1.2.0" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==" }, "@xtuc/long": { - "version": "4.2.2" + "version": "4.2.2", + "resolved": 
"https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==" }, "abab": { "version": "2.0.6", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", + "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", "dev": true }, "accepts": { "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", "requires": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "acorn": { - "version": "8.7.1" + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.0.tgz", + "integrity": "sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w==" }, "acorn-globals": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-6.0.0.tgz", + "integrity": "sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==", "dev": true, "requires": { "acorn": "^7.1.1", @@ -22135,23 +27294,33 @@ "dependencies": { "acorn": { "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", "dev": true } } }, "acorn-import-assertions": { "version": "1.8.0", + "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz", + "integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==", "requires": {} }, "acorn-walk": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", + "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", "dev": true }, "address": { - "version": "1.2.0" + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/address/-/address-1.2.1.tgz", + "integrity": "sha512-B+6bi5D34+fDYENiH5qOlA0cV2rAGKuWZ9LeyUUehbXy8e0VS9e498yO0Jeeh+iM+6KbfudHTFjXw2MmJD4QRA==" }, "agent-base": { "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", "dev": true, "requires": { "debug": "4" @@ -22159,6 +27328,8 @@ }, "aggregate-error": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", "requires": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" @@ -22166,6 +27337,8 @@ }, "ajv": { "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "requires": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -22174,18 +27347,24 @@ }, "dependencies": { "json-schema-traverse": { - "version": "0.4.1" + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" } } }, "ajv-formats": { "version": "2.1.1", + 
"resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", "requires": { "ajv": "^8.0.0" }, "dependencies": { "ajv": { "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -22197,44 +27376,56 @@ }, "ajv-keywords": { "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", "requires": {} }, "algoliasearch": { - "version": "4.13.0", - "requires": { - "@algolia/cache-browser-local-storage": "4.13.0", - "@algolia/cache-common": "4.13.0", - "@algolia/cache-in-memory": "4.13.0", - "@algolia/client-account": "4.13.0", - "@algolia/client-analytics": "4.13.0", - "@algolia/client-common": "4.13.0", - "@algolia/client-personalization": "4.13.0", - "@algolia/client-search": "4.13.0", - "@algolia/logger-common": "4.13.0", - "@algolia/logger-console": "4.13.0", - "@algolia/requester-browser-xhr": "4.13.0", - "@algolia/requester-common": "4.13.0", - "@algolia/requester-node-http": "4.13.0", - "@algolia/transporter": "4.13.0" + "version": "4.14.2", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.14.2.tgz", + "integrity": "sha512-ngbEQonGEmf8dyEh5f+uOIihv4176dgbuOZspiuhmTTBRBuzWu3KCGHre6uHj5YyuC7pNvQGzB6ZNJyZi0z+Sg==", + "requires": { + "@algolia/cache-browser-local-storage": "4.14.2", + "@algolia/cache-common": "4.14.2", + "@algolia/cache-in-memory": "4.14.2", + "@algolia/client-account": "4.14.2", + "@algolia/client-analytics": "4.14.2", + "@algolia/client-common": "4.14.2", + "@algolia/client-personalization": "4.14.2", + "@algolia/client-search": "4.14.2", + "@algolia/logger-common": "4.14.2", + "@algolia/logger-console": "4.14.2", + "@algolia/requester-browser-xhr": "4.14.2", + "@algolia/requester-common": "4.14.2", + "@algolia/requester-node-http": "4.14.2", + "@algolia/transporter": "4.14.2" } }, "algoliasearch-helper": { - "version": "3.8.2", + "version": "3.11.1", + "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.11.1.tgz", + "integrity": "sha512-mvsPN3eK4E0bZG0/WlWJjeqe/bUD2KOEVOl0GyL/TGXn6wcpZU8NOuztGHCUKXkyg5gq6YzUakVTmnmSSO5Yiw==", "requires": { "@algolia/events": "^4.0.1" } }, "ansi-align": { "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", + "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", "requires": { "string-width": "^4.1.0" }, "dependencies": { "emoji-regex": { - "version": "8.0.0" + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "string-width": { "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -22243,8 +27434,16 @@ } } }, + "ansi-colors": { + "version": "4.1.3", + "resolved": 
"https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "dev": true + }, "ansi-escapes": { "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", "dev": true, "requires": { "type-fest": "^0.21.3" @@ -22252,86 +27451,94 @@ "dependencies": { "type-fest": { "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", "dev": true } } }, "ansi-html-community": { - "version": "0.0.8" + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", + "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==" }, "ansi-regex": { - "version": "5.0.1" + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" }, "ansi-styles": { "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "requires": { "color-convert": "^1.9.0" } }, "any-base": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/any-base/-/any-base-1.1.0.tgz", + "integrity": "sha512-uMgjozySS8adZZYePpaWs8cxB9/kdzmpX6SgJZ+wbz1K5eYk5QMYDVJaZKhxyIHUdnnJkfR7SVgStgH7LkGUyg==" }, "anymatch": { "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", "requires": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, - "aproba": { - "version": "1.2.0" - }, - "are-we-there-yet": { - "version": "1.1.7", - "requires": { - "delegates": "^1.0.0", - "readable-stream": "^2.0.6" - }, - "dependencies": { - "isarray": { - "version": "1.0.0" - }, - "readable-stream": { - "version": "2.3.7", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "requires": { - "safe-buffer": "~5.1.0" - } - } - } + "arch": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", + "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", + "dev": true }, "arg": { - "version": "5.0.1" + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==" }, "argparse": { - "version": "2.0.1" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "aria-query": { - "version": "5.0.0", + "version": "5.0.2", + "resolved": 
"https://registry.npmjs.org/aria-query/-/aria-query-5.0.2.tgz", + "integrity": "sha512-eigU3vhqSO+Z8BKDnVLN/ompjhf3pYzecKXz8+whRy+9gZu8n1TCGfwzQUUPnqdHl9ax1Hr9031orZ+UOEYr7Q==", "dev": true }, "array-flatten": { - "version": "2.1.2" + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz", + "integrity": "sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==" }, "array-union": { - "version": "2.1.0" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==" }, "asap": { - "version": "2.0.6" + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "dev": true, + "requires": { + "safer-buffer": "~2.1.0" + } }, "asn1.js": { "version": "5.4.1", + "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz", + "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==", "requires": { "bn.js": "^4.0.0", "inherits": "^2.0.1", @@ -22340,12 +27547,16 @@ }, "dependencies": { "bn.js": { - "version": "4.12.0" + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" } } }, "assert": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/assert/-/assert-2.0.0.tgz", + "integrity": "sha512-se5Cd+js9dXJnu6Ag2JFc00t+HmHOen+8Q+L7O9zI0PqQXr20uk2J0XQqMxZEeo5U50o8Nvmmx7dZrl+Ufr35A==", "requires": { "es6-object-assign": "^1.1.0", "is-nan": "^1.2.1", @@ -22353,28 +27564,41 @@ "util": "^0.12.0" } }, + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", + "dev": true + }, + "astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true + }, "async": { - "version": "2.6.4", - "requires": { - "lodash": "^4.17.14" - } + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", + "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==", + "dev": true }, "asynckit": { "version": "0.4.0", - "dev": true + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "at-least-node": { - "version": "1.0.0" - }, - "atob": { - "version": "2.1.2", - "dev": true + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", + "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==" }, "autoprefixer": { - "version": "10.4.7", + "version": "10.4.12", + 
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.12.tgz", + "integrity": "sha512-WrCGV9/b97Pa+jtwf5UGaRjgQIg7OK3D06GnoYoZNcG1Xb8Gt3EfuKjlhh9i/VtT16g6PYjZ69jdJ2g8FxSC4Q==", "requires": { - "browserslist": "^4.20.3", - "caniuse-lite": "^1.0.30001335", + "browserslist": "^4.21.4", + "caniuse-lite": "^1.0.30001407", "fraction.js": "^4.2.0", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", @@ -22382,16 +27606,35 @@ } }, "available-typed-arrays": { - "version": "1.0.5" + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", + "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==" + }, + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==", + "dev": true + }, + "aws4": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", + "dev": true }, "axios": { - "version": "0.25.0", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz", + "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==", "requires": { - "follow-redirects": "^1.14.7" + "follow-redirects": "^1.14.9", + "form-data": "^4.0.0" } }, "babel-jest": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-27.5.1.tgz", + "integrity": "sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg==", "dev": true, "requires": { "@jest/transform": "^27.5.1", @@ -22406,6 +27649,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -22413,6 +27658,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -22421,6 +27668,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -22428,14 +27677,20 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, 
"requires": { "has-flag": "^4.0.0" @@ -22445,6 +27700,8 @@ }, "babel-loader": { "version": "8.2.5", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.5.tgz", + "integrity": "sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ==", "requires": { "find-cache-dir": "^3.3.1", "loader-utils": "^2.0.0", @@ -22454,35 +27711,47 @@ }, "babel-plugin-apply-mdx-type-prop": { "version": "1.6.22", + "resolved": "https://registry.npmjs.org/babel-plugin-apply-mdx-type-prop/-/babel-plugin-apply-mdx-type-prop-1.6.22.tgz", + "integrity": "sha512-VefL+8o+F/DfK24lPZMtJctrCVOfgbqLAGZSkxwhazQv4VxPg3Za/i40fu22KR2m8eEda+IfSOlPLUSIiLcnCQ==", "requires": { "@babel/helper-plugin-utils": "7.10.4", "@mdx-js/util": "1.6.22" }, "dependencies": { "@babel/helper-plugin-utils": { - "version": "7.10.4" + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", + "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" } } }, "babel-plugin-dynamic-import-node": { "version": "2.3.0", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz", + "integrity": "sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ==", "requires": { "object.assign": "^4.1.0" } }, "babel-plugin-extract-import-names": { "version": "1.6.22", + "resolved": "https://registry.npmjs.org/babel-plugin-extract-import-names/-/babel-plugin-extract-import-names-1.6.22.tgz", + "integrity": "sha512-yJ9BsJaISua7d8zNT7oRG1ZLBJCIdZ4PZqmH8qa9N5AK01ifk3fnkc98AXhtzE7UkfCsEumvoQWgoYLhOnJ7jQ==", "requires": { "@babel/helper-plugin-utils": "7.10.4" }, "dependencies": { "@babel/helper-plugin-utils": { - "version": "7.10.4" + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", + "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" } } }, "babel-plugin-istanbul": { "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", "dev": true, "requires": { "@babel/helper-plugin-utils": "^7.0.0", @@ -22494,6 +27763,8 @@ }, "babel-plugin-jest-hoist": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz", + "integrity": "sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ==", "dev": true, "requires": { "@babel/template": "^7.3.3", @@ -22503,33 +27774,43 @@ } }, "babel-plugin-polyfill-corejs2": { - "version": "0.3.1", + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz", + "integrity": "sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q==", "requires": { - "@babel/compat-data": "^7.13.11", - "@babel/helper-define-polyfill-provider": "^0.3.1", + "@babel/compat-data": "^7.17.7", + "@babel/helper-define-polyfill-provider": "^0.3.3", "semver": "^6.1.1" }, "dependencies": { "semver": { - "version": "6.3.0" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "babel-plugin-polyfill-corejs3": { - "version": "0.5.2", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz", + "integrity": "sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA==", "requires": { - "@babel/helper-define-polyfill-provider": "^0.3.1", - "core-js-compat": "^3.21.0" + "@babel/helper-define-polyfill-provider": "^0.3.3", + "core-js-compat": "^3.25.1" } }, "babel-plugin-polyfill-regenerator": { - "version": "0.3.1", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz", + "integrity": "sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw==", "requires": { - "@babel/helper-define-polyfill-provider": "^0.3.1" + "@babel/helper-define-polyfill-provider": "^0.3.3" } }, "babel-plugin-styled-components": { "version": "2.0.7", + "resolved": "https://registry.npmjs.org/babel-plugin-styled-components/-/babel-plugin-styled-components-2.0.7.tgz", + "integrity": "sha512-i7YhvPgVqRKfoQ66toiZ06jPNA3p6ierpfUuEWxNF+fV27Uv5gxBkf8KZLHUCc1nFA9j6+80pYoIpqCeyW3/bA==", "requires": { "@babel/helper-annotate-as-pure": "^7.16.0", "@babel/helper-module-imports": "^7.16.0", @@ -22539,10 +27820,14 @@ } }, "babel-plugin-syntax-jsx": { - "version": "6.18.0" + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz", + "integrity": "sha512-qrPaCSo9c8RHNRHIotaufGbuOBN8rtdC4QrrFFc43vyWCCz7Kl7GL1PGaXtMGQZUXrkCjNEgxDfmAuAabr/rlw==" }, "babel-preset-current-node-syntax": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz", + "integrity": "sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==", "dev": true, "requires": { "@babel/plugin-syntax-async-generators": "^7.8.4", @@ -22561,6 +27846,8 @@ }, "babel-preset-jest": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz", + "integrity": "sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag==", "dev": true, "requires": { "babel-plugin-jest-hoist": "^27.5.1", @@ -22568,54 +27855,85 @@ } }, "bail": { - "version": "1.0.5" + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz", + "integrity": "sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==" }, "balanced-match": { - "version": "1.0.2" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "base16": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz", + "integrity": "sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==" }, "base64-js": { - "version": "1.5.1" + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, "batch": { - "version": "0.6.1" + "version": "0.6.1", 
+ "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", + "integrity": "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==" + }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "dev": true, + "requires": { + "tweetnacl": "^0.14.3" + } }, "big.js": { - "version": "5.2.2" + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==" }, "binary-extensions": { - "version": "2.2.0" + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==" }, "bl": { "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", "requires": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" - }, - "dependencies": { - "buffer": { - "version": "5.7.1", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - } } }, + "blob-util": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", + "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==", + "dev": true + }, "bluebird": { - "version": "3.7.2" + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true }, "bmp-js": { - "version": "0.1.0" + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/bmp-js/-/bmp-js-0.1.0.tgz", + "integrity": "sha512-vHdS19CnY3hwiNdkaqk93DvjVLfbEcI8mys4UjuWrlX1haDmroo8o4xCzh4wD6DGV6HxRCyauwhHRqMTfERtjw==" }, "bn.js": { - "version": "5.2.0" + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", + "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==" }, "body-parser": { - "version": "1.20.0", + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", + "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", "requires": { "bytes": "3.1.2", "content-type": "~1.0.4", @@ -22625,40 +27943,60 @@ "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.10.3", + "qs": "6.11.0", "raw-body": "2.5.1", "type-is": "~1.6.18", "unpipe": "1.0.0" }, "dependencies": { "bytes": { - "version": "3.1.2" + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" }, "debug": { "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + 
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "requires": { + "side-channel": "^1.0.4" + } } } }, "bonjour-service": { - "version": "1.0.12", + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.0.14.tgz", + "integrity": "sha512-HIMbgLnk1Vqvs6B4Wq5ep7mxvj9sGz5d1JJyDNSGNIdA/w2MCz6GTjWTdjqOJV1bEPj+6IkxDvWNFKEBxNt4kQ==", "requires": { "array-flatten": "^2.1.2", "dns-equal": "^1.0.0", "fast-deep-equal": "^3.1.3", - "multicast-dns": "^7.2.4" + "multicast-dns": "^7.2.5" } }, "boolbase": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==" }, "boxen": { "version": "6.2.1", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-6.2.1.tgz", + "integrity": "sha512-H4PEsJXfFI/Pt8sjDWbHlQPx4zL/bvSQjcilJmaulGt5mLDorHOHpmdXAJcBcmru7PhYSp/cDMWRko4ZUMFkSw==", "requires": { "ansi-align": "^3.0.1", "camelcase": "^6.2.0", @@ -22672,12 +28010,16 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { "color-convert": "^2.0.1" } }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -22685,18 +28027,26 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "has-flag": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "requires": { "has-flag": "^4.0.0" } @@ -22705,6 +28055,8 @@ }, "brace-expansion": { "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -22712,19 +28064,27 @@ }, "braces": { "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": 
"sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", "requires": { "fill-range": "^7.0.1" } }, "brorand": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==" }, "browser-process-hrtime": { "version": "1.0.0", + "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", "dev": true }, "browserify-aes": { "version": "1.2.0", + "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", "requires": { "buffer-xor": "^1.0.3", "cipher-base": "^1.0.0", @@ -22736,6 +28096,8 @@ }, "browserify-cipher": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz", + "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==", "requires": { "browserify-aes": "^1.0.4", "browserify-des": "^1.0.0", @@ -22744,6 +28106,8 @@ }, "browserify-des": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz", + "integrity": "sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==", "requires": { "cipher-base": "^1.0.1", "des.js": "^1.0.0", @@ -22753,6 +28117,8 @@ }, "browserify-rsa": { "version": "4.1.0", + "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz", + "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==", "requires": { "bn.js": "^5.0.0", "randombytes": "^2.0.1" @@ -22760,6 +28126,8 @@ }, "browserify-sign": { "version": "4.2.1", + "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.1.tgz", + "integrity": "sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==", "requires": { "bn.js": "^5.1.1", "browserify-rsa": "^4.0.1", @@ -22770,60 +28138,80 @@ "parse-asn1": "^5.1.5", "readable-stream": "^3.6.0", "safe-buffer": "^5.2.0" - }, - "dependencies": { - "safe-buffer": { - "version": "5.2.1" - } } }, "browserify-zlib": { "version": "0.2.0", + "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", + "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", "requires": { "pako": "~1.0.5" } }, "browserslist": { - "version": "4.20.3", + "version": "4.21.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz", + "integrity": "sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==", "requires": { - "caniuse-lite": "^1.0.30001332", - "electron-to-chromium": "^1.4.118", - "escalade": "^3.1.1", - "node-releases": "^2.0.3", - "picocolors": "^1.0.0" + "caniuse-lite": "^1.0.30001400", + "electron-to-chromium": "^1.4.251", + "node-releases": "^2.0.6", + "update-browserslist-db": "^1.0.9" } }, "bser": { "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", "dev": true, "requires": 
{ "node-int64": "^0.4.0" } }, "buffer": { - "version": "6.0.3", + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", "requires": { "base64-js": "^1.3.1", - "ieee754": "^1.2.1" + "ieee754": "^1.1.13" } }, + "buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "dev": true + }, "buffer-equal": { - "version": "0.0.1" + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal/-/buffer-equal-0.0.1.tgz", + "integrity": "sha512-RgSV6InVQ9ODPdLWJ5UAqBqJBOg370Nz6ZQtRzpt6nUjc8v0St97uJ4PYC6NztqIScrAXafKM3mZPMygSe1ggA==" }, "buffer-from": { - "version": "1.1.2" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" }, "buffer-xor": { - "version": "1.0.3" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", + "integrity": "sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ==" }, "builtin-status-codes": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", + "integrity": "sha512-HpGFw18DgFWlncDfjTa2rcQ4W88O1mC8e8yZ2AvQY5KDaktSTwo+KRf6nHK6FRI5FyRyb/5T6+TSxfP7QyGsmQ==" }, "bytes": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==" }, "cacheable-request": { "version": "6.1.0", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", + "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", "requires": { "clone-response": "^1.0.2", "get-stream": "^5.1.0", @@ -22834,51 +28222,71 @@ "responselike": "^1.0.2" }, "dependencies": { - "get-stream": { - "version": "5.2.0", - "requires": { - "pump": "^3.0.0" - } - }, "lowercase-keys": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==" }, "normalize-url": { - "version": "4.5.1" + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", + "integrity": "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==" } } }, + "cachedir": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", + "integrity": "sha512-A+Fezp4zxnit6FanDmv9EqXNAi3vt9DWp51/71UEhXukb7QUuvtv9344h91dyAxuTLoSYJFU299qzR3tzwPAhw==", + "dev": true + }, "call-bind": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", "requires": { "function-bind": "^1.1.1", "get-intrinsic": "^1.0.2" } }, "call-me-maybe": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.1.tgz", + "integrity": "sha512-wCyFsDQkKPwwF8BDwOiWNx/9K45L/hvggQiDbve+viMNMQnWhrlYIuBk09offfwCRtCO9P6XwUttufzU11WCVw==" }, "callsites": { - "version": "3.1.0" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" }, "camel-case": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", + "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", "requires": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "camelcase": { - "version": "6.3.0" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==" }, "camelcase-css": { - "version": "2.0.1" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==" }, "camelize": { - "version": "1.0.0" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/camelize/-/camelize-1.0.1.tgz", + "integrity": "sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ==" }, "caniuse-api": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", + "integrity": "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", "requires": { "browserslist": "^4.0.0", "caniuse-lite": "^1.0.0", @@ -22887,13 +28295,25 @@ } }, "caniuse-lite": { - "version": "1.0.30001335" + "version": "1.0.30001421", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001421.tgz", + "integrity": "sha512-Sw4eLbgUJAEhjLs1Fa+mk45sidp1wRn5y6GtDpHGBaNJ9OCDJaVh2tIaWWUnGfuXfKf1JCBaIarak3FkVAvEeA==" + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==", + "dev": true }, "ccount": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz", + "integrity": "sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==" }, "chalk": { "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "requires": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", @@ -22902,41 +28322,62 @@ }, "char-regex": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", "dev": true }, "character-entities": { - "version": "1.2.4" + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", + "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==" }, "character-entities-legacy": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": 
"https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", + "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==" }, "character-reference-invalid": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", + "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==" + }, + "check-more-types": { + "version": "2.24.0", + "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", + "integrity": "sha512-Pj779qHxV2tuapviy1bSZNEL1maXr13bPYpsvSDB68HlYcYuhlDrmGd63i0JHMCLKzc7rUSNIrpdJlhVlNwrxA==", + "dev": true }, "cheerio": { - "version": "1.0.0-rc.10", + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz", + "integrity": "sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==", "requires": { - "cheerio-select": "^1.5.0", - "dom-serializer": "^1.3.2", - "domhandler": "^4.2.0", - "htmlparser2": "^6.1.0", - "parse5": "^6.0.1", - "parse5-htmlparser2-tree-adapter": "^6.0.1", - "tslib": "^2.2.0" + "cheerio-select": "^2.1.0", + "dom-serializer": "^2.0.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "htmlparser2": "^8.0.1", + "parse5": "^7.0.0", + "parse5-htmlparser2-tree-adapter": "^7.0.0" } }, "cheerio-select": { - "version": "1.6.0", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz", + "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==", "requires": { - "css-select": "^4.3.0", - "css-what": "^6.0.1", - "domelementtype": "^2.2.0", - "domhandler": "^4.3.1", - "domutils": "^2.8.0" + "boolbase": "^1.0.0", + "css-select": "^5.1.0", + "css-what": "^6.1.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1" } }, "chokidar": { "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", "requires": { "anymatch": "~3.1.2", "braces": "~3.0.2", @@ -22949,17 +28390,25 @@ } }, "chownr": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" }, "chrome-trace-event": { - "version": "1.0.3" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", + "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==" }, "ci-info": { - "version": "3.3.0", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.5.0.tgz", + "integrity": "sha512-yH4RezKOGlOhxkmhbeNuC4eYZKAUsEaGtBuBzDDP1eFUKiccDWzBABxBfOx31IDwDIXMTxWuwAxUGModvkbuVw==", "dev": true }, "cipher-base": { "version": "1.0.4", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", + "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", "requires": { "inherits": "^2.0.1", "safe-buffer": "^5.0.1" @@ -22967,35 +28416,89 @@ }, "cjs-module-lexer": { "version": "1.2.2", + "resolved": 
"https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz", + "integrity": "sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==", "dev": true }, "classnames": { - "version": "2.3.1" + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", + "integrity": "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==" }, "clean-css": { - "version": "5.3.0", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-5.3.1.tgz", + "integrity": "sha512-lCr8OHhiWCTw4v8POJovCoh4T7I9U11yVsPjMWWnnMmp9ZowCxyad1Pathle/9HjaDp+fdQKjO9fQydE6RHTZg==", "requires": { "source-map": "~0.6.0" } }, "clean-stack": { - "version": "2.2.0" + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" }, "cli-boxes": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", + "integrity": "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==" + }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "requires": { + "restore-cursor": "^3.1.0" + } }, "cli-table3": { - "version": "0.6.2", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.3.tgz", + "integrity": "sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==", "requires": { "@colors/colors": "1.5.0", "string-width": "^4.2.0" }, "dependencies": { "emoji-regex": { - "version": "8.0.0" + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + } + } + }, + "cli-truncate": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", + "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", + "dev": true, + "requires": { + "slice-ansi": "^3.0.0", + "string-width": "^4.2.0" + }, + "dependencies": { + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "string-width": { "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -23006,6 +28509,9 @@ }, "cliui": { "version": "7.0.4", + "resolved": 
"https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, "requires": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", @@ -23014,24 +28520,39 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, "requires": { "color-convert": "^2.0.1" } }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "requires": { "color-name": "~1.1.4" } }, "color-name": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "emoji-regex": { - "version": "8.0.0" + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "string-width": { "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -23040,6 +28561,9 @@ }, "wrap-ansi": { "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, "requires": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -23050,6 +28574,8 @@ }, "clone-deep": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", "requires": { "is-plain-object": "^2.0.4", "kind-of": "^6.0.2", @@ -23057,43 +28583,46 @@ } }, "clone-response": { - "version": "1.0.2", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz", + "integrity": "sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==", "requires": { "mimic-response": "^1.0.0" }, "dependencies": { "mimic-response": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==" } } }, "clsx": { - "version": "1.1.1" + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", + "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==" }, "co": { "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", "dev": true }, - "coa": { - "version": "2.0.2", - "requires": { - "@types/q": "^1.5.1", - "chalk": 
"^2.4.1", - "q": "^1.1.2" - } - }, - "code-point-at": { - "version": "1.1.0" - }, "collapse-white-space": { - "version": "1.0.6" + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.6.tgz", + "integrity": "sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ==" }, "collect-v8-coverage": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz", + "integrity": "sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==", "dev": true }, "color": { "version": "3.2.1", + "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", + "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", "requires": { "color-convert": "^1.9.3", "color-string": "^1.6.0" @@ -23101,53 +28630,82 @@ }, "color-convert": { "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "requires": { "color-name": "1.1.3" } }, "color-name": { - "version": "1.1.3" + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, "color-string": { "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", "requires": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "colord": { - "version": "2.9.2" + "version": "2.9.3", + "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz", + "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==" }, "colorette": { - "version": "1.4.0" + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz", + "integrity": "sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==" }, "combine-promises": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/combine-promises/-/combine-promises-1.1.0.tgz", + "integrity": "sha512-ZI9jvcLDxqwaXEixOhArm3r7ReIivsXkpbyEWyeOhzz1QS0iSgBPnWvEqvIQtYyamGCYA88gFhmUrs9hrrQ0pg==" }, "combined-stream": { "version": "1.0.8", - "dev": true, + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", "requires": { "delayed-stream": "~1.0.0" } }, "comma-separated-tokens": { - "version": "1.0.8" + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", + "integrity": "sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==" }, "commander": { - "version": "5.1.0" + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", + "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==" + }, + "common-tags": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", + "integrity": 
"sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", + "dev": true }, "commondir": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==" }, "compressible": { "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", "requires": { "mime-db": ">= 1.43.0 < 2" } }, "compression": { "version": "1.7.4", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", + "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", "requires": { "accepts": "~1.3.5", "bytes": "3.0.0", @@ -23160,20 +28718,33 @@ "dependencies": { "debug": { "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" } } }, "concat-map": { - "version": "0.0.1" + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "configstore": { "version": "5.0.1", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz", + "integrity": "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==", "requires": { "dot-prop": "^5.2.0", "graceful-fs": "^4.1.2", @@ -23184,46 +28755,64 @@ } }, "connect-history-api-fallback": { - "version": "1.6.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz", + "integrity": "sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==" }, "consola": { - "version": "2.15.3" + "version": "2.15.3", + "resolved": "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz", + "integrity": "sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==" }, "console-browserify": { - "version": "1.2.0" - }, - "console-control-strings": { - "version": "1.1.0" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz", + "integrity": "sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==" }, "consolidated-events": { - "version": "2.0.2" + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/consolidated-events/-/consolidated-events-2.0.2.tgz", + "integrity": "sha512-2/uRVMdRypf5z/TW/ncD/66l75P5hH2vM/GR8Jf8HLc2xnfJtmina6F6du8+v4Z2vTrMo7jC+W1tmEEuuELgkQ==" }, "constants-browserify": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", + "integrity": "sha512-xFxOwqIzR/e1k1gLiWEophSCMqXcwVHIH7akf7b/vxcUeGunlj3hvZaaqxwHsTgn+IndtkQJgSztIDWeumWJDQ==" }, "content-disposition": { - "version": "0.5.2" + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", + "integrity": "sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA==" }, "content-type": { - "version": "1.0.4" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" }, "convert-source-map": { - "version": "1.8.0", - "requires": { - "safe-buffer": "~5.1.1" - } + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==" }, "cookie": { - "version": "0.5.0" + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", + "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==" }, "cookie-signature": { - "version": "1.0.6" + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, "copy-text-to-clipboard": { - "version": "3.0.1" + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/copy-text-to-clipboard/-/copy-text-to-clipboard-3.0.1.tgz", + "integrity": "sha512-rvVsHrpFcL4F2P8ihsoLdFHmd404+CMg71S756oRSeQgqk51U3kicGdnvfkrxva0xXH92SjGS62B0XIJsbh+9Q==" }, "copy-webpack-plugin": { "version": "10.2.4", + "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-10.2.4.tgz", + "integrity": "sha512-xFVltahqlsRcyyJqQbDY6EYTtyQZF9rf+JPjwHObLdPFMEISqkFkr7mFoVOC6BfYS/dNThyoQKvziugm+OnwBg==", "requires": { "fast-glob": "^3.2.7", "glob-parent": "^6.0.1", @@ -23235,6 +28824,8 @@ "dependencies": { "ajv": { "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -23244,21 +28835,29 @@ }, "ajv-keywords": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "requires": { "fast-deep-equal": "^3.1.3" } }, "array-union": { - "version": "3.0.1" + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-3.0.1.tgz", + "integrity": "sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw==" }, "glob-parent": { "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "requires": { "is-glob": "^4.0.3" } }, "globby": { "version": "12.2.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-12.2.0.tgz", + "integrity": 
"sha512-wiSuFQLZ+urS9x2gGPl1H5drc5twabmm4m2gTR27XDFyjUHJUNsS8o/2aKyIF6IoBaR630atdher0XJ5g6OMmA==", "requires": { "array-union": "^3.0.1", "dir-glob": "^3.0.1", @@ -23270,6 +28869,8 @@ }, "schema-utils": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "requires": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", @@ -23278,33 +28879,39 @@ } }, "slash": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==" } } }, "core-js": { - "version": "3.22.4" + "version": "3.25.5", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.25.5.tgz", + "integrity": "sha512-nbm6eZSjm+ZuBQxCUPQKQCoUEfFOXjUZ8dTTyikyKaWrTYmAVbykQfwsKE5dBK88u3QCkCrzsx/PPlKfhsvgpw==" }, "core-js-compat": { - "version": "3.22.4", + "version": "3.25.5", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.25.5.tgz", + "integrity": "sha512-ovcyhs2DEBUIE0MGEKHP4olCUW/XYte3Vroyxuh38rD1wAO4dHohsovUC4eAOuzFxE6b+RXvBU3UZ9o0YhUTkA==", "requires": { - "browserslist": "^4.20.3", - "semver": "7.0.0" - }, - "dependencies": { - "semver": { - "version": "7.0.0" - } + "browserslist": "^4.21.4" } }, "core-js-pure": { - "version": "3.22.4" + "version": "3.25.5", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.25.5.tgz", + "integrity": "sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg==" }, "core-util-is": { - "version": "1.0.3" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" }, "cosmiconfig": { "version": "7.0.1", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz", + "integrity": "sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==", "requires": { "@types/parse-json": "^4.0.0", "import-fresh": "^3.2.1", @@ -23315,18 +28922,24 @@ }, "create-ecdh": { "version": "4.0.4", + "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz", + "integrity": "sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==", "requires": { "bn.js": "^4.1.0", "elliptic": "^6.5.3" }, "dependencies": { "bn.js": { - "version": "4.12.0" + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" } } }, "create-hash": { "version": "1.2.0", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", "requires": { "cipher-base": "^1.0.1", "inherits": "^2.0.1", @@ -23337,6 +28950,8 @@ }, "create-hmac": { "version": "1.1.7", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", "requires": { "cipher-base": "^1.0.3", "create-hash": "^1.1.0", @@ -23348,12 +28963,16 @@ }, "cross-fetch": { "version": "3.1.5", + "resolved": 
"https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.5.tgz", + "integrity": "sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw==", "requires": { "node-fetch": "2.6.7" } }, "cross-spawn": { "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "requires": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -23362,6 +28981,8 @@ }, "crypto-browserify": { "version": "3.12.0", + "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz", + "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==", "requires": { "browserify-cipher": "^1.0.0", "browserify-sign": "^4.0.0", @@ -23377,26 +28998,25 @@ } }, "crypto-random-string": { - "version": "2.0.0" - }, - "css": { - "version": "3.0.0", - "dev": true, - "requires": { - "inherits": "^2.0.4", - "source-map": "^0.6.1", - "source-map-resolve": "^0.6.0" - } + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==" }, "css-color-keywords": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/css-color-keywords/-/css-color-keywords-1.0.0.tgz", + "integrity": "sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg==" }, "css-declaration-sorter": { - "version": "6.2.2", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.3.1.tgz", + "integrity": "sha512-fBffmak0bPAnyqc/HO8C3n2sHrp9wcqQz6ES9koRF2/mLOVAx9zIQ3Y7R29sYCteTPqMCwns4WYQoCX91Xl3+w==", "requires": {} }, "css-loader": { "version": "3.6.0", + "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-3.6.0.tgz", + "integrity": "sha512-M5lSukoWi1If8dhQAUCvj4H8vUt3vOnwbQBH9DdTm/s4Ym2B/3dPMtYZeJmq7Q3S3Pa+I94DcZ7pc9bP14cWIQ==", "dev": true, "requires": { "camelcase": "^5.3.1", @@ -23416,10 +29036,14 @@ "dependencies": { "camelcase": { "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", "dev": true }, "json5": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", "dev": true, "requires": { "minimist": "^1.2.0" @@ -23427,6 +29051,8 @@ }, "loader-utils": { "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.0.tgz", + "integrity": "sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==", "dev": true, "requires": { "big.js": "^5.2.2", @@ -23436,10 +29062,14 @@ }, "picocolors": { "version": "0.2.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", "dev": true }, "postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": 
true, "requires": { "picocolors": "^0.2.1", @@ -23448,12 +29078,16 @@ }, "semver": { "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true } } }, "css-minimizer-webpack-plugin": { "version": "3.4.1", + "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz", + "integrity": "sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q==", "requires": { "cssnano": "^5.0.6", "jest-worker": "^27.0.2", @@ -23465,6 +29099,8 @@ "dependencies": { "ajv": { "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -23474,12 +29110,16 @@ }, "ajv-keywords": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "requires": { "fast-deep-equal": "^3.1.3" } }, "schema-utils": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "requires": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", @@ -23490,20 +29130,21 @@ } }, "css-select": { - "version": "4.3.0", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", + "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", "requires": { "boolbase": "^1.0.0", - "css-what": "^6.0.1", - "domhandler": "^4.3.1", - "domutils": "^2.8.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", "nth-check": "^2.0.1" } }, - "css-select-base-adapter": { - "version": "0.1.1" - }, "css-to-react-native": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-3.0.0.tgz", + "integrity": "sha512-Ro1yETZA813eoyUp2GDBhG2j+YggidUmzO1/v9eYBKR2EHVEniE2MI/NqpTQ954BMpTPZFsGNPm46qFB9dpaPQ==", "requires": { "camelize": "^1.0.0", "css-color-keywords": "^1.0.0", @@ -23511,35 +29152,47 @@ } }, "css-tree": { - "version": "1.0.0-alpha.37", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz", + "integrity": "sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==", "requires": { - "mdn-data": "2.0.4", + "mdn-data": "2.0.14", "source-map": "^0.6.1" } }, "css-what": { - "version": "6.1.0" + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==" }, "css.escape": { "version": "1.5.1", + "resolved": "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz", + "integrity": "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==", "dev": true }, "cssesc": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": 
"sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" }, "cssnano": { - "version": "5.1.7", + "version": "5.1.13", + "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-5.1.13.tgz", + "integrity": "sha512-S2SL2ekdEz6w6a2epXn4CmMKU4K3KpcyXLKfAYc9UQQqJRkD/2eLUG0vJ3Db/9OvO5GuAdgXw3pFbR6abqghDQ==", "requires": { - "cssnano-preset-default": "^5.2.7", + "cssnano-preset-default": "^5.2.12", "lilconfig": "^2.0.3", "yaml": "^1.10.2" } }, "cssnano-preset-advanced": { - "version": "5.3.3", + "version": "5.3.8", + "resolved": "https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-5.3.8.tgz", + "integrity": "sha512-xUlLLnEB1LjpEik+zgRNlk8Y/koBPPtONZjp7JKbXigeAmCrFvq9H0pXW5jJV45bQWAlmJ0sKy+IMr0XxLYQZg==", "requires": { "autoprefixer": "^10.3.7", - "cssnano-preset-default": "^5.2.7", + "cssnano-preset-default": "^5.2.12", "postcss-discard-unused": "^5.1.0", "postcss-merge-idents": "^5.1.1", "postcss-reduce-idents": "^5.2.0", @@ -23547,33 +29200,35 @@ } }, "cssnano-preset-default": { - "version": "5.2.7", + "version": "5.2.12", + "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz", + "integrity": "sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew==", "requires": { - "css-declaration-sorter": "^6.2.2", + "css-declaration-sorter": "^6.3.0", "cssnano-utils": "^3.1.0", "postcss-calc": "^8.2.3", "postcss-colormin": "^5.3.0", - "postcss-convert-values": "^5.1.0", - "postcss-discard-comments": "^5.1.1", + "postcss-convert-values": "^5.1.2", + "postcss-discard-comments": "^5.1.2", "postcss-discard-duplicates": "^5.1.0", "postcss-discard-empty": "^5.1.1", "postcss-discard-overridden": "^5.1.0", - "postcss-merge-longhand": "^5.1.4", - "postcss-merge-rules": "^5.1.1", + "postcss-merge-longhand": "^5.1.6", + "postcss-merge-rules": "^5.1.2", "postcss-minify-font-values": "^5.1.0", "postcss-minify-gradients": "^5.1.1", - "postcss-minify-params": "^5.1.2", - "postcss-minify-selectors": "^5.2.0", + "postcss-minify-params": "^5.1.3", + "postcss-minify-selectors": "^5.2.1", "postcss-normalize-charset": "^5.1.0", "postcss-normalize-display-values": "^5.1.0", - "postcss-normalize-positions": "^5.1.0", - "postcss-normalize-repeat-style": "^5.1.0", + "postcss-normalize-positions": "^5.1.1", + "postcss-normalize-repeat-style": "^5.1.1", "postcss-normalize-string": "^5.1.0", "postcss-normalize-timing-functions": "^5.1.0", "postcss-normalize-unicode": "^5.1.0", "postcss-normalize-url": "^5.1.0", "postcss-normalize-whitespace": "^5.1.1", - "postcss-ordered-values": "^5.1.1", + "postcss-ordered-values": "^5.1.3", "postcss-reduce-initial": "^5.1.0", "postcss-reduce-transforms": "^5.1.0", "postcss-svgo": "^5.1.0", @@ -23582,32 +29237,28 @@ }, "cssnano-utils": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-3.1.0.tgz", + "integrity": "sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA==", "requires": {} }, "csso": { "version": "4.2.0", + "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", + "integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==", "requires": { "css-tree": "^1.1.2" - }, - "dependencies": { - "css-tree": { - "version": "1.1.3", - "requires": { - "mdn-data": "2.0.14", - "source-map": "^0.6.1" - } - }, - "mdn-data": { - "version": "2.0.14" - } } }, "cssom": { "version": "0.4.4", + "resolved": 
"https://registry.npmjs.org/cssom/-/cssom-0.4.4.tgz", + "integrity": "sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw==", "dev": true }, "cssstyle": { "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-2.3.0.tgz", + "integrity": "sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==", "dev": true, "requires": { "cssom": "~0.3.6" @@ -23615,15 +29266,160 @@ "dependencies": { "cssom": { "version": "0.3.8", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.8.tgz", + "integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==", "dev": true } } }, "csstype": { - "version": "3.0.11" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.1.tgz", + "integrity": "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==" + }, + "cypress": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-10.10.0.tgz", + "integrity": "sha512-bU8r44x1NIYAUNNXt3CwJpLOVth7HUv2hUhYCxZmgZ1IugowDvuHNpevnoZRQx1KKOEisLvIJW+Xen5Pjn41pg==", + "dev": true, + "requires": { + "@cypress/request": "^2.88.10", + "@cypress/xvfb": "^1.2.4", + "@types/node": "^14.14.31", + "@types/sinonjs__fake-timers": "8.1.1", + "@types/sizzle": "^2.3.2", + "arch": "^2.2.0", + "blob-util": "^2.0.2", + "bluebird": "^3.7.2", + "buffer": "^5.6.0", + "cachedir": "^2.3.0", + "chalk": "^4.1.0", + "check-more-types": "^2.24.0", + "cli-cursor": "^3.1.0", + "cli-table3": "~0.6.1", + "commander": "^5.1.0", + "common-tags": "^1.8.0", + "dayjs": "^1.10.4", + "debug": "^4.3.2", + "enquirer": "^2.3.6", + "eventemitter2": "6.4.7", + "execa": "4.1.0", + "executable": "^4.1.1", + "extract-zip": "2.0.1", + "figures": "^3.2.0", + "fs-extra": "^9.1.0", + "getos": "^3.2.1", + "is-ci": "^3.0.0", + "is-installed-globally": "~0.4.0", + "lazy-ass": "^1.6.0", + "listr2": "^3.8.3", + "lodash": "^4.17.21", + "log-symbols": "^4.0.0", + "minimist": "^1.2.6", + "ospath": "^1.2.2", + "pretty-bytes": "^5.6.0", + "proxy-from-env": "1.0.0", + "request-progress": "^3.0.0", + "semver": "^7.3.2", + "supports-color": "^8.1.1", + "tmp": "~0.2.1", + "untildify": "^4.0.0", + "yauzl": "^2.10.0" + }, + "dependencies": { + "@types/node": { + "version": "14.18.32", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.32.tgz", + "integrity": "sha512-Y6S38pFr04yb13qqHf8uk1nHE3lXgQ30WZbv1mLliV9pt0NjvqdWttLcrOYLnXbOafknVYRHZGoMSpR9UwfYow==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "color-convert": 
{ + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dev": true, + "requires": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", + "dev": true, + "requires": { + "assert-plus": "^1.0.0" + } }, "data-urls": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-2.0.0.tgz", + "integrity": "sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ==", "dev": true, "requires": { "abab": "^2.0.3", @@ -23631,64 +29427,120 @@ "whatwg-url": "^8.0.0" } }, + "dayjs": { + "version": "1.11.5", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.5.tgz", + "integrity": "sha512-CAdX5Q3YW3Gclyo5Vpqkgpj8fSdLQcRuzfX6mC6Phy0nfJ0eGYOeS7m4mt2plDWLAtA4TqTakvbboHvUxfe4iA==", + "dev": true + }, "debug": { "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "requires": { "ms": "2.1.2" } }, "decimal.js": { - "version": "10.3.1", + "version": "10.4.2", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.2.tgz", + "integrity": "sha512-ic1yEvwT6GuvaYwBLLY6/aFFgjZdySKTE8en/fkU3QICTmRtgtSlFn0u0BXN06InZwtfCelR7j8LRiDI/02iGA==", "dev": true }, "decko": { - "version": "1.2.0" - }, - "decode-uri-component": { - "version": "0.2.0", - "dev": true + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decko/-/decko-1.2.0.tgz", + "integrity": "sha512-m8FnyHXV1QX+S1cl+KPFDIl6NMkxtKsy6+U/aYyjrOqWMuwAwYWu7ePqrsUHtDR5Y8Yk2pi/KIDSgF+vT4cPOQ==" }, "decompress-response": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", "requires": { "mimic-response": "^3.1.0" } }, "dedent": { "version": "0.7.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", + 
"integrity": "sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==", "dev": true }, "deep-extend": { - "version": "0.6.0" + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==" }, "deep-is": { "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true }, "deepmerge": { - "version": "4.2.2" + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", + "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==" }, "default-gateway": { "version": "6.0.3", + "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz", + "integrity": "sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg==", "requires": { "execa": "^5.0.0" + }, + "dependencies": { + "execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "requires": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + } + }, + "get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==" + }, + "human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==" + } } }, "defer-to-connect": { - "version": "1.1.3" + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz", + "integrity": "sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==" }, "define-lazy-prop": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==" }, "define-properties": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", "requires": { "has-property-descriptors": "^1.0.0", "object-keys": "^1.1.1" } }, "del": { - "version": "6.0.0", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/del/-/del-6.1.1.tgz", + "integrity": "sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==", "requires": { "globby": "^11.0.1", "graceful-fs": "^4.2.4", @@ -23702,60 +29554,65 @@ }, "delayed-stream": { "version": "1.0.0", - "dev": true - }, - "delegates": { - "version": "1.0.0" + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": 
"sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, "depd": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" }, "des.js": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.1.tgz", + "integrity": "sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==", "requires": { "inherits": "^2.0.1", "minimalistic-assert": "^1.0.0" } }, "destroy": { - "version": "1.2.0" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" }, "detab": { "version": "2.0.4", + "resolved": "https://registry.npmjs.org/detab/-/detab-2.0.4.tgz", + "integrity": "sha512-8zdsQA5bIkoRECvCrNKPla84lyoR7DSAyf7p0YgXzBO9PDJx8KntPUay7NS6yp+KdxdVtiE5SpHKtbp2ZQyA9g==", "requires": { "repeat-string": "^1.5.4" } }, "detect-libc": { - "version": "2.0.1" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", + "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==" }, "detect-newline": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", "dev": true }, "detect-node": { - "version": "2.1.0" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", + "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==" }, "detect-port": { - "version": "1.3.0", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/detect-port/-/detect-port-1.5.1.tgz", + "integrity": "sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ==", "requires": { "address": "^1.0.1", - "debug": "^2.6.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "requires": { - "ms": "2.0.0" - } - }, - "ms": { - "version": "2.0.0" - } + "debug": "4" } }, "detect-port-alt": { "version": "1.1.6", + "resolved": "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz", + "integrity": "sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q==", "requires": { "address": "^1.0.1", "debug": "^2.6.0" @@ -23763,21 +29620,29 @@ "dependencies": { "debug": { "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" } } }, "diff-sequences": { - "version": "27.5.1", + "version": "29.2.0", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.2.0.tgz", + "integrity": "sha512-413SY5JpYeSBZxmenGEmCVQ8mCgtFJF0w9PROdaS6z987XC2Pd2GOKqOITLtMftmyFZqgtCOb/QA7/Z3ZXfzIw==", "dev": true }, "diffie-hellman": { "version": "5.0.3", + "resolved": 
"https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", + "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", "requires": { "bn.js": "^4.1.0", "miller-rabin": "^4.0.0", @@ -23785,54 +29650,84 @@ }, "dependencies": { "bn.js": { - "version": "4.12.0" + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" } } }, "dir-glob": { "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", "requires": { "path-type": "^4.0.0" } }, "dns-equal": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", + "integrity": "sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg==" }, "dns-packet": { - "version": "5.3.1", + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.4.0.tgz", + "integrity": "sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g==", "requires": { "@leichtgewicht/ip-codec": "^2.0.1" } }, "dom-accessibility-api": { "version": "0.5.14", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.14.tgz", + "integrity": "sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg==", "dev": true }, "dom-converter": { "version": "0.2.0", + "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", + "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", "requires": { "utila": "~0.4" } }, "dom-serializer": { - "version": "1.4.1", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", "requires": { - "domelementtype": "^2.0.1", - "domhandler": "^4.2.0", - "entities": "^2.0.0" + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" } }, "dom-walk": { - "version": "0.1.2" + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", + "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" + }, + "dom7": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/dom7/-/dom7-4.0.4.tgz", + "integrity": "sha512-DSSgBzQ4rJWQp1u6o+3FVwMNnT5bzQbMb+o31TjYYeRi05uAcpF8koxdfzeoe5ElzPmua7W7N28YJhF7iEKqIw==", + "requires": { + "ssr-window": "^4.0.0" + } }, "domain-browser": { - "version": "4.22.0" + "version": "4.22.0", + "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-4.22.0.tgz", + "integrity": "sha512-IGBwjF7tNk3cwypFNH/7bfzBcgSCbaMOD3GsaY1AU/JRrnHnYgEM0+9kQt52iZxjNsjBtJYtao146V+f8jFZNw==" }, "domelementtype": { - "version": "2.3.0" + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" }, "domexception": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/domexception/-/domexception-2.0.1.tgz", + "integrity": 
"sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg==", "dev": true, "requires": { "webidl-conversions": "^5.0.0" @@ -23840,29 +29735,39 @@ "dependencies": { "webidl-conversions": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz", + "integrity": "sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==", "dev": true } } }, "domhandler": { - "version": "4.3.1", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", "requires": { - "domelementtype": "^2.2.0" + "domelementtype": "^2.3.0" } }, "dompurify": { - "version": "2.3.6" + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.4.0.tgz", + "integrity": "sha512-Be9tbQMZds4a3C6xTmz68NlMfeONA//4dOavl/1rNw50E+/QO0KVpbcU0PcaW0nsQxurXls9ZocqFxk8R2mWEA==" }, "domutils": { - "version": "2.8.0", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz", + "integrity": "sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==", "requires": { - "dom-serializer": "^1.0.1", - "domelementtype": "^2.2.0", - "domhandler": "^4.2.0" + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.1" } }, "dot-case": { "version": "3.0.4", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", + "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", "requires": { "no-case": "^3.0.4", "tslib": "^2.0.3" @@ -23870,36 +29775,64 @@ }, "dot-prop": { "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", "requires": { "is-obj": "^2.0.0" }, "dependencies": { "is-obj": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" } } }, "dotenv": { "version": "10.0.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-10.0.0.tgz", + "integrity": "sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q==", "dev": true }, "duplexer": { - "version": "0.1.2" + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", + "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==" }, "duplexer3": { - "version": "0.1.4" + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz", + "integrity": "sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==" }, "eastasianwidth": { - "version": "0.2.0" + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + }, + "ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", + "dev": true, + 
"requires": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } }, "ee-first": { - "version": "1.1.1" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "electron-to-chromium": { - "version": "1.4.129" + "version": "1.4.284", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz", + "integrity": "sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA==" }, "elliptic": { "version": "6.5.4", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", + "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", "requires": { "bn.js": "^4.11.9", "brorand": "^1.1.0", @@ -23911,78 +29844,117 @@ }, "dependencies": { "bn.js": { - "version": "4.12.0" + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" } } }, "emittery": { "version": "0.8.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.8.1.tgz", + "integrity": "sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg==", "dev": true }, "emoji-regex": { - "version": "9.2.2" + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" }, "emojis-list": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==" }, "emoticon": { - "version": "3.2.0" + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/emoticon/-/emoticon-3.2.0.tgz", + "integrity": "sha512-SNujglcLTTg+lDAcApPNgEdudaqQFiAbJCqzjNxJkvN9vAwCGi0uu8IUVvx+f16h+V44KCY6Y2yboroc9pilHg==" }, "encodeurl": { - "version": "1.0.2" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==" }, "end-of-stream": { "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "requires": { "once": "^1.4.0" } }, "enhanced-resolve": { - "version": "5.9.3", + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz", + "integrity": "sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==", "requires": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, + "enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "requires": { + "ansi-colors": "^4.1.1" + } + }, "entities": { - "version": "2.2.0" + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.4.0.tgz", + "integrity": 
"sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==" }, "error-ex": { "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", "requires": { "is-arrayish": "^0.2.1" } }, "es-abstract": { - "version": "1.19.5", + "version": "1.20.4", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.4.tgz", + "integrity": "sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA==", "requires": { "call-bind": "^1.0.2", "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", - "get-intrinsic": "^1.1.1", + "function.prototype.name": "^1.1.5", + "get-intrinsic": "^1.1.3", "get-symbol-description": "^1.0.0", "has": "^1.0.3", + "has-property-descriptors": "^1.0.0", "has-symbols": "^1.0.3", "internal-slot": "^1.0.3", - "is-callable": "^1.2.4", + "is-callable": "^1.2.7", "is-negative-zero": "^2.0.2", "is-regex": "^1.1.4", "is-shared-array-buffer": "^1.0.2", "is-string": "^1.0.7", "is-weakref": "^1.0.2", - "object-inspect": "^1.12.0", + "object-inspect": "^1.12.2", "object-keys": "^1.1.1", - "object.assign": "^4.1.2", - "string.prototype.trimend": "^1.0.4", - "string.prototype.trimstart": "^1.0.4", - "unbox-primitive": "^1.0.1" + "object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.4.3", + "safe-regex-test": "^1.0.0", + "string.prototype.trimend": "^1.0.5", + "string.prototype.trimstart": "^1.0.5", + "unbox-primitive": "^1.0.2" } }, "es-module-lexer": { - "version": "0.9.3" + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz", + "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==" }, "es-to-primitive": { "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", "requires": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", @@ -23990,25 +29962,39 @@ } }, "es6-object-assign": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es6-object-assign/-/es6-object-assign-1.1.0.tgz", + "integrity": "sha512-MEl9uirslVwqQU369iHNWZXsI8yaZYGg/D65aOgZkeyFJwHYSxilf7rQzXKI7DdDuBPrBXbfk3sl9hJhmd5AUw==" }, "es6-promise": { - "version": "3.3.1" + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", + "integrity": "sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==" }, "escalade": { - "version": "3.1.1" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" }, "escape-goat": { - "version": "2.1.1" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz", + "integrity": "sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==" }, "escape-html": { - "version": "1.0.3" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" }, "escape-string-regexp": { - "version": "1.0.5" + "version": 
"1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" }, "escodegen": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.0.0.tgz", + "integrity": "sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==", "dev": true, "requires": { "esprima": "^4.0.1", @@ -24020,97 +30006,150 @@ }, "eslint-scope": { "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "requires": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" }, "dependencies": { "estraverse": { - "version": "4.3.0" + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==" } } }, "esprima": { - "version": "4.0.1" + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" }, "esrecurse": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "requires": { "estraverse": "^5.2.0" } }, "estraverse": { - "version": "5.3.0" + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" }, "esutils": { - "version": "2.0.3" + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" }, "eta": { - "version": "1.12.3" + "version": "1.12.3", + "resolved": "https://registry.npmjs.org/eta/-/eta-1.12.3.tgz", + "integrity": "sha512-qHixwbDLtekO/d51Yr4glcaUJCIjGVJyTzuqV4GPlgZo1YpgOKG+avQynErZIYrfM6JIJdtiG2Kox8tbb+DoGg==" }, "etag": { - "version": "1.8.1" + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==" }, "eval": { "version": "0.1.8", + "resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", + "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", "requires": { "@types/node": "*", "require-like": ">= 0.1.1" } }, + "eventemitter2": { + "version": "6.4.7", + "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.7.tgz", + "integrity": "sha512-tYUSVOGeQPKt/eC1ABfhHy5Xd96N3oIijJvN3O9+TsC28T5V9yX9oEfEK5faP0EFSNVOG97qtAS68GBrQB2hDg==", + "dev": true + }, "eventemitter3": { - "version": "4.0.7" + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" }, "events": { - "version": "3.3.0" + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": 
"sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" }, "evp_bytestokey": { "version": "1.0.3", + "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", + "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", "requires": { "md5.js": "^1.3.4", "safe-buffer": "^5.1.1" } }, "execa": { - "version": "5.1.1", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", + "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", + "dev": true, "requires": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", + "cross-spawn": "^7.0.0", + "get-stream": "^5.0.0", + "human-signals": "^1.1.1", "is-stream": "^2.0.0", "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", + "npm-run-path": "^4.0.0", + "onetime": "^5.1.0", + "signal-exit": "^3.0.2", "strip-final-newline": "^2.0.0" } }, + "executable": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", + "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", + "dev": true, + "requires": { + "pify": "^2.2.0" + } + }, "exif-parser": { - "version": "0.1.12" + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/exif-parser/-/exif-parser-0.1.12.tgz", + "integrity": "sha512-c2bQfLNbMzLPmzQuOr8fy0csy84WmwnER81W88DzTp9CYNPJ6yzOj2EZAh9pywYpqHnshVLHQJ8WzldAyfY+Iw==" }, "exit": { "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", "dev": true }, "expand-template": { - "version": "2.0.3" + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==" }, "expect": { - "version": "27.5.1", + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.2.1.tgz", + "integrity": "sha512-BJtA754Fba0YWRWHgjKUMTA3ltWarKgITXHQnbZ2mTxTXC4yMQlR0FI7HkB3fJYkhWBf4qjNiqvg3LDtXCcVRQ==", "dev": true, "requires": { - "@jest/types": "^27.5.1", - "jest-get-type": "^27.5.1", - "jest-matcher-utils": "^27.5.1", - "jest-message-util": "^27.5.1" + "@jest/expect-utils": "^29.2.1", + "jest-get-type": "^29.2.0", + "jest-matcher-utils": "^29.2.1", + "jest-message-util": "^29.2.1", + "jest-util": "^29.2.1" } }, "express": { - "version": "4.18.1", + "version": "4.18.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", + "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", "requires": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.0", + "body-parser": "1.20.1", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.5.0", @@ -24129,7 +30168,7 @@ "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", "proxy-addr": "~2.0.7", - "qs": "6.10.3", + "qs": "6.11.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.18.0", @@ -24142,48 +30181,91 @@ }, "dependencies": { "array-flatten": { - "version": "1.1.1" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": 
"sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, "content-disposition": { "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", "requires": { "safe-buffer": "5.2.1" } }, "debug": { "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "path-to-regexp": { - "version": "0.1.7" + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, - "range-parser": { - "version": "1.2.1" + "qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "requires": { + "side-channel": "^1.0.4" + } }, - "safe-buffer": { - "version": "5.2.1" + "range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" } } }, "extend": { - "version": "3.0.2" + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, "extend-shallow": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + }, + "extract-zip": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", + "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", + "dev": true, "requires": { - "is-extendable": "^0.1.0" + "@types/yauzl": "^2.9.1", + "debug": "^4.1.1", + "get-stream": "^5.1.0", + "yauzl": "^2.10.0" } }, + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==", + "dev": true + }, "fast-deep-equal": { - "version": "3.1.3" + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" }, "fast-glob": { - "version": "3.2.11", + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", "requires": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", @@ -24193,40 +30275,56 @@ } }, 
"fast-json-stable-stringify": { - "version": "2.1.0" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, "fast-levenshtein": { "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true }, "fast-safe-stringify": { - "version": "2.1.1" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" }, "fast-url-parser": { "version": "1.1.3", + "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", + "integrity": "sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==", "requires": { "punycode": "^1.3.2" }, "dependencies": { "punycode": { - "version": "1.4.1" + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==" } } }, "fastq": { "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", "requires": { "reusify": "^1.0.4" } }, "faye-websocket": { "version": "0.11.4", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", + "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", "requires": { "websocket-driver": ">=0.5.1" } }, "fb-watchman": { - "version": "2.0.1", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", "dev": true, "requires": { "bser": "2.1.1" @@ -24234,12 +30332,16 @@ }, "fbemitter": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/fbemitter/-/fbemitter-3.0.0.tgz", + "integrity": "sha512-KWKaceCwKQU0+HPoop6gn4eOHk50bBv/VxjJtGMfwmJt3D29JpN4H4eisCtIPA+a8GVBam+ldMMpMjJUvpDyHw==", "requires": { "fbjs": "^3.0.0" } }, "fbjs": { "version": "3.0.4", + "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-3.0.4.tgz", + "integrity": "sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ==", "requires": { "cross-fetch": "^3.1.5", "fbjs-css-vars": "^1.0.0", @@ -24251,16 +30353,40 @@ } }, "fbjs-css-vars": { - "version": "1.0.2" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz", + "integrity": "sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==" + }, + "fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dev": true, + "requires": { + "pend": "~1.2.0" + } }, "feed": { "version": "4.2.2", + "resolved": "https://registry.npmjs.org/feed/-/feed-4.2.2.tgz", + "integrity": 
"sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ==", "requires": { "xml-js": "^1.6.11" } }, + "figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, "file-loader": { "version": "6.2.0", + "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz", + "integrity": "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==", "requires": { "loader-utils": "^2.0.0", "schema-utils": "^3.0.0" @@ -24268,6 +30394,8 @@ "dependencies": { "schema-utils": { "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", "requires": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", @@ -24277,22 +30405,32 @@ } }, "file-type": { - "version": "9.0.0" + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-9.0.0.tgz", + "integrity": "sha512-Qe/5NJrgIOlwijpq3B7BEpzPFcgzggOTagZmkXQY4LA6bsXKTUstK7Wp12lEJ/mLKTpvIZxmIuRcLYWT6ov9lw==" }, "filesize": { - "version": "8.0.7" + "version": "8.0.7", + "resolved": "https://registry.npmjs.org/filesize/-/filesize-8.0.7.tgz", + "integrity": "sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ==" }, "fill-range": { "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", "requires": { "to-regex-range": "^5.0.1" } }, "filter-obj": { - "version": "2.0.2" + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-2.0.2.tgz", + "integrity": "sha512-lO3ttPjHZRfjMcxWKb1j1eDhTFsu4meeR3lnMcnBFhk6RuLhvEiuALu2TlfL310ph4lCYYwgF/ElIjdP739tdg==" }, "finalhandler": { "version": "1.2.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", + "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", "requires": { "debug": "2.6.9", "encodeurl": "~1.0.2", @@ -24305,17 +30443,23 @@ "dependencies": { "debug": { "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "ms": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" } } }, "find-cache-dir": { "version": "3.3.2", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", + "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", "requires": { "commondir": "^1.0.1", "make-dir": "^3.0.2", @@ -24324,6 +30468,8 @@ }, "find-up": { "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "requires": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" @@ 
-24331,19 +30477,41 @@ }, "flux": { "version": "4.0.3", + "resolved": "https://registry.npmjs.org/flux/-/flux-4.0.3.tgz", + "integrity": "sha512-yKAbrp7JhZhj6uiT1FTuVMlIAT1J4jqEyBpFApi1kxpGZCvacMVc/t1pMQyotqHhAgvoE3bNvAykhCo2CLjnYw==", "requires": { "fbemitter": "^3.0.0", "fbjs": "^3.0.1" } }, "follow-redirects": { - "version": "1.14.9" + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", + "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==" + }, + "for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "requires": { + "is-callable": "^1.1.3" + } }, "foreach": { - "version": "2.0.5" + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.6.tgz", + "integrity": "sha512-k6GAGDyqLe9JaebCsFCoudPPWfihKu8pylYXRlqP1J7ms39iPoTtk2fviNglIeQEwdh0bQeKJ01ZPyuyQvKzwg==" + }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==", + "dev": true }, "fork-ts-checker-webpack-plugin": { "version": "6.5.2", + "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz", + "integrity": "sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA==", "requires": { "@babel/code-frame": "^7.8.3", "@types/json-schema": "^7.0.5", @@ -24362,12 +30530,16 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { "color-convert": "^2.0.1" } }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -24375,15 +30547,21 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "cosmiconfig": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz", + "integrity": "sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==", "requires": { "@types/parse-json": "^4.0.0", "import-fresh": "^3.1.0", @@ -24394,6 +30572,8 @@ }, "fs-extra": { "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", "requires": { "at-least-node": "^1.0.0", "graceful-fs": "^4.2.0", @@ -24402,10 +30582,14 @@ } }, "has-flag": { - "version": 
"4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "schema-utils": { "version": "2.7.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz", + "integrity": "sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A==", "requires": { "@types/json-schema": "^7.0.4", "ajv": "^6.12.2", @@ -24414,18 +30598,23 @@ }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "requires": { "has-flag": "^4.0.0" } }, "tapable": { - "version": "1.1.3" + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz", + "integrity": "sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==" } } }, "form-data": { - "version": "3.0.1", - "dev": true, + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -24433,22 +30622,34 @@ } }, "forwarded": { - "version": "0.2.0" + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==" }, "fraction.js": { - "version": "4.2.0" + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", + "integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==" }, "fresh": { - "version": "0.5.2" + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" }, "fs": { - "version": "0.0.2" + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.2.tgz", + "integrity": "sha512-YAiVokMCrSIFZiroB1oz51hPiPRVcUtSa4x2U5RYXyhS9VAPdiFigKbPTnOSq7XY8wd3FIVPYmXpo5lMzFmxgg==" }, "fs-constants": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" }, "fs-extra": { "version": "10.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz", + "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==", "requires": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", @@ -24456,122 +30657,157 @@ } }, "fs-monkey": { - "version": "1.0.3" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.3.tgz", + "integrity": "sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q==" }, "fs.realpath": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "fsevents": { "version": "2.3.2", + "resolved": 
"https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", "optional": true }, "function-bind": { - "version": "1.1.1" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" }, - "gauge": { - "version": "2.7.4", + "function.prototype.name": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", + "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", "requires": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1" - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "string-width": { - "version": "1.0.2", - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "requires": { - "ansi-regex": "^2.0.0" - } - } + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.0", + "functions-have-names": "^1.2.2" } }, + "functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==" + }, "gensync": { - "version": "1.0.0-beta.2" + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==" }, "get-caller-file": { - "version": "2.0.5" + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" }, "get-intrinsic": { - "version": "1.1.1", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", "requires": { "function-bind": "^1.1.1", "has": "^1.0.3", - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.3" } }, "get-own-enumerable-property-symbols": { - "version": "3.0.2" + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", + "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==" }, "get-package-type": { "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", "dev": true }, "get-stream": { - "version": "6.0.1" + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": 
"sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "requires": { + "pump": "^3.0.0" + } }, "get-symbol-description": { "version": "1.0.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", "requires": { "call-bind": "^1.0.2", "get-intrinsic": "^1.1.1" } }, + "getos": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", + "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", + "dev": true, + "requires": { + "async": "^3.2.0" + } + }, + "getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", + "dev": true, + "requires": { + "assert-plus": "^1.0.0" + } + }, "gifwrap": { "version": "0.9.4", + "resolved": "https://registry.npmjs.org/gifwrap/-/gifwrap-0.9.4.tgz", + "integrity": "sha512-MDMwbhASQuVeD4JKd1fKgNgCRL3fGqMM4WaqpNhWO0JiMOAjbQdumbs4BbBZEy9/M00EHEjKN3HieVhCUlwjeQ==", "requires": { "image-q": "^4.0.0", "omggif": "^1.0.10" } }, "github-from-package": { - "version": "0.0.0" + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==" }, "github-slugger": { - "version": "1.4.0" + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.4.0.tgz", + "integrity": "sha512-w0dzqw/nt51xMVmlaV1+JRzN+oCa1KfcgGEWhxUG16wbdA+Xnt/yoFO8Z8x/V82ZcZ0wy6ln9QDup5avbhiDhQ==" }, "glob": { - "version": "7.2.0", + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "^3.0.4", + "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "glob-parent": { "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "requires": { "is-glob": "^4.0.1" } }, "glob-to-regexp": { - "version": "0.4.1" + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" }, "global": { "version": "4.4.0", + "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz", + "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==", "requires": { "min-document": "^2.19.0", "process": "^0.11.10" @@ -24579,31 +30815,39 @@ }, "global-dirs": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", + "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", "requires": { "ini": "2.0.0" - }, - "dependencies": { - "ini": { - "version": "2.0.0" - } } }, "global-modules": { "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz", + "integrity": "sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==", "requires": { "global-prefix": "^3.0.0" } }, "global-prefix": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz", + "integrity": "sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==", "requires": { "ini": "^1.3.5", "kind-of": "^6.0.2", "which": "^1.3.1" }, "dependencies": { + "ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" + }, "which": { "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", "requires": { "isexe": "^2.0.0" } @@ -24611,10 +30855,14 @@ } }, "globals": { - "version": "11.12.0" + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" }, "globby": { "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", "requires": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", @@ -24626,6 +30874,8 @@ }, "got": { "version": "9.6.0", + "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", + "integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==", "requires": { "@sindresorhus/is": "^0.14.0", "@szmarczak/http-timer": "^1.1.2", @@ -24642,26 +30892,36 @@ "dependencies": { "decompress-response": { "version": "3.3.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==", "requires": { "mimic-response": "^1.0.0" } }, "get-stream": { "version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", "requires": { "pump": "^3.0.0" } }, "mimic-response": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==" } } }, "graceful-fs": { - "version": "4.2.10" + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" }, "gray-matter": { "version": "4.0.3", + "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", + "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", "requires": { "js-yaml": "^3.13.1", "kind-of": "^6.0.2", @@ -24671,12 +30931,16 @@ "dependencies": { "argparse": { "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": 
"sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "requires": { "sprintf-js": "~1.0.2" } }, "js-yaml": { "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "requires": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -24686,61 +30950,75 @@ }, "gzip-size": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz", + "integrity": "sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==", "requires": { "duplexer": "^0.1.2" } }, "handle-thing": { - "version": "2.0.1" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", + "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==" }, "has": { "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", "requires": { "function-bind": "^1.1.1" } }, "has-bigints": { - "version": "1.0.2" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==" }, "has-flag": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, "has-property-descriptors": { "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", + "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", "requires": { "get-intrinsic": "^1.1.1" } }, "has-symbols": { - "version": "1.0.3" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" }, "has-tostringtag": { "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", "requires": { "has-symbols": "^1.0.2" } }, - "has-unicode": { - "version": "2.0.1" - }, "has-yarn": { - "version": "2.1.0" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz", + "integrity": "sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==" }, "hash-base": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", + "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", "requires": { "inherits": "^2.0.4", "readable-stream": "^3.6.0", "safe-buffer": "^5.2.0" - }, - "dependencies": { - "safe-buffer": { - "version": "5.2.1" - } } }, "hash.js": { "version": "1.1.7", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", + "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", "requires": { "inherits": "^2.0.3", "minimalistic-assert": "^1.0.1" @@ 
-24748,6 +31026,8 @@ }, "hast-to-hyperscript": { "version": "9.0.1", + "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz", + "integrity": "sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA==", "requires": { "@types/unist": "^2.0.3", "comma-separated-tokens": "^1.0.0", @@ -24760,6 +31040,8 @@ }, "hast-util-from-parse5": { "version": "6.0.1", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz", + "integrity": "sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA==", "requires": { "@types/parse5": "^5.0.0", "hastscript": "^6.0.0", @@ -24769,11 +31051,20 @@ "web-namespaces": "^1.0.0" } }, + "hast-util-is-element": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-1.1.0.tgz", + "integrity": "sha512-oUmNua0bFbdrD/ELDSSEadRVtWZOf3iF6Lbv81naqsIV99RnSCieTbWuWCY8BAeEfKJTKl0gRdokv+dELutHGQ==" + }, "hast-util-parse-selector": { - "version": "2.2.5" + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz", + "integrity": "sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==" }, "hast-util-raw": { "version": "6.0.1", + "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-6.0.1.tgz", + "integrity": "sha512-ZMuiYA+UF7BXBtsTBNcLBF5HzXzkyE6MLzJnL605LKE8GJylNjGc4jjxazAHUtcwT5/CEt6afRKViYB4X66dig==", "requires": { "@types/hast": "^2.0.0", "hast-util-from-parse5": "^6.0.0", @@ -24785,10 +31076,19 @@ "web-namespaces": "^1.0.0", "xtend": "^4.0.0", "zwitch": "^1.0.0" + }, + "dependencies": { + "parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" + } } }, "hast-util-to-parse5": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz", + "integrity": "sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ==", "requires": { "hast-to-hyperscript": "^9.0.0", "property-information": "^5.0.0", @@ -24797,8 +31097,20 @@ "zwitch": "^1.0.0" } }, + "hast-util-to-text": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/hast-util-to-text/-/hast-util-to-text-2.0.1.tgz", + "integrity": "sha512-8nsgCARfs6VkwH2jJU9b8LNTuR4700na+0h3PqCaEk4MAnMDeu5P0tP8mjk9LLNGxIeQRLbiDbZVw6rku+pYsQ==", + "requires": { + "hast-util-is-element": "^1.0.0", + "repeat-string": "^1.0.0", + "unist-util-find-after": "^3.0.0" + } + }, "hastscript": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz", + "integrity": "sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==", "requires": { "@types/hast": "^2.0.0", "comma-separated-tokens": "^1.0.0", @@ -24808,10 +31120,14 @@ } }, "he": { - "version": "1.2.0" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==" }, "history": { "version": "4.10.1", + "resolved": "https://registry.npmjs.org/history/-/history-4.10.1.tgz", + "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", "requires": { 
"@babel/runtime": "^7.1.2", "loose-envify": "^1.2.0", @@ -24823,6 +31139,8 @@ }, "hmac-drbg": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", + "integrity": "sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==", "requires": { "hash.js": "^1.0.3", "minimalistic-assert": "^1.0.0", @@ -24831,17 +31149,23 @@ }, "hoist-non-react-statics": { "version": "3.3.2", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", + "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", "requires": { "react-is": "^16.7.0" }, "dependencies": { "react-is": { - "version": "16.13.1" + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" } } }, "hpack.js": { "version": "2.1.6", + "resolved": "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz", + "integrity": "sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==", "requires": { "inherits": "^2.0.1", "obuf": "^1.0.0", @@ -24850,10 +31174,14 @@ }, "dependencies": { "isarray": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" }, "readable-stream": { "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -24864,8 +31192,15 @@ "util-deprecate": "~1.0.1" } }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, "string_decoder": { "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "requires": { "safe-buffer": "~5.1.0" } @@ -24874,20 +31209,28 @@ }, "html-encoding-sniffer": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", + "integrity": "sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ==", "dev": true, "requires": { "whatwg-encoding": "^1.0.5" } }, "html-entities": { - "version": "2.3.3" + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.3.3.tgz", + "integrity": "sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA==" }, "html-escaper": { "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", "dev": true }, "html-minifier-terser": { "version": "6.1.0", + "resolved": "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", + "integrity": "sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==", 
"requires": { "camel-case": "^4.1.2", "clean-css": "^5.2.2", @@ -24899,18 +31242,26 @@ }, "dependencies": { "commander": { - "version": "8.3.0" + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==" } } }, "html-tags": { - "version": "3.2.0" + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.2.0.tgz", + "integrity": "sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg==" }, "html-void-elements": { - "version": "1.0.5" + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.5.tgz", + "integrity": "sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==" }, "html-webpack-plugin": { "version": "5.5.0", + "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz", + "integrity": "sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw==", "requires": { "@types/html-minifier-terser": "^6.0.0", "html-minifier-terser": "^6.0.2", @@ -24920,22 +31271,30 @@ } }, "htmlparser2": { - "version": "6.1.0", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.1.tgz", + "integrity": "sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA==", "requires": { - "domelementtype": "^2.0.1", - "domhandler": "^4.0.0", - "domutils": "^2.5.2", - "entities": "^2.0.0" + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "entities": "^4.3.0" } }, "http-cache-semantics": { - "version": "4.1.0" + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz", + "integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==" }, "http-deceiver": { - "version": "1.2.7" + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz", + "integrity": "sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==" }, "http-errors": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", "requires": { "depd": "2.0.0", "inherits": "2.0.4", @@ -24945,10 +31304,14 @@ } }, "http-parser-js": { - "version": "0.5.6" + "version": "0.5.8", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz", + "integrity": "sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==" }, "http-proxy": { "version": "1.18.1", + "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", + "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", "requires": { "eventemitter3": "^4.0.0", "follow-redirects": "^1.0.0", @@ -24957,6 +31320,8 @@ }, "http-proxy-agent": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", "dev": true, "requires": { "@tootallnate/once": "1", @@ -24966,6 +31331,8 @@ }, "http-proxy-middleware": { 
"version": "2.0.6", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz", + "integrity": "sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==", "requires": { "@types/http-proxy": "^1.17.8", "http-proxy": "^1.18.1", @@ -24975,18 +31342,37 @@ }, "dependencies": { "is-plain-obj": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", + "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==" } } }, + "http-signature": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", + "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", + "dev": true, + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^2.0.2", + "sshpk": "^1.14.1" + } + }, "http2-client": { - "version": "1.3.5" + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/http2-client/-/http2-client-1.3.5.tgz", + "integrity": "sha512-EC2utToWl4RKfs5zd36Mxq7nzHHBuomZboI0yYL6Y0RmBgT7Sgkq4rQ0ezFTYoIsSs7Tm9SJe+o2FcAg6GBhGA==" }, "https-browserify": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-1.0.0.tgz", + "integrity": "sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg==" }, "https-proxy-agent": { "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", "dev": true, "requires": { "agent-base": "6", @@ -24994,16 +31380,23 @@ } }, "human-signals": { - "version": "2.1.0" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", + "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", + "dev": true }, "iconv-lite": { "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "requires": { "safer-buffer": ">= 2.1.2 < 3" } }, "icss-utils": { "version": "4.1.1", + "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-4.1.1.tgz", + "integrity": "sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA==", "dev": true, "requires": { "postcss": "^7.0.14" @@ -25011,10 +31404,14 @@ "dependencies": { "picocolors": { "version": "0.2.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", "dev": true }, "postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, "requires": { "picocolors": "^0.2.1", @@ -25024,43 +31421,61 @@ } }, "ieee754": { - "version": "1.2.1" + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" }, "ignore": { - "version": "5.2.0" + "version": "5.2.0", + 
"resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==" }, "image-q": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/image-q/-/image-q-4.0.0.tgz", + "integrity": "sha512-PfJGVgIfKQJuq3s0tTDOKtztksibuUEbJQIYT3by6wctQo+Rdlh7ef4evJ5NCdxY4CfMbvFkocEwbl4BF8RlJw==", "requires": { "@types/node": "16.9.1" }, "dependencies": { "@types/node": { - "version": "16.9.1" + "version": "16.9.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz", + "integrity": "sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g==" } } }, "image-size": { - "version": "1.0.1", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/image-size/-/image-size-1.0.2.tgz", + "integrity": "sha512-xfOoWjceHntRb3qFCrh5ZFORYH8XCdYpASltMhZ/Q0KZiOwjdE/Yl2QCiWdwD+lygV5bMCvauzgu5PxBX/Yerg==", "requires": { "queue": "6.0.2" } }, "immer": { - "version": "9.0.12" + "version": "9.0.15", + "resolved": "https://registry.npmjs.org/immer/-/immer-9.0.15.tgz", + "integrity": "sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ==" }, "import-fresh": { "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "requires": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "import-lazy": { - "version": "2.1.0" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", + "integrity": "sha512-m7ZEHgtw69qOGw+jwxXkHlrlIPdTGkyh66zXZ1ajZbxkDBNjSY/LGbmjc7h0s2ELsUDTAhFr55TrPSSqJGPG0A==" }, "import-local": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", "dev": true, "requires": { "pkg-dir": "^4.2.0", @@ -25068,32 +31483,48 @@ } }, "imurmurhash": { - "version": "0.1.4" + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==" }, "indent-string": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" }, "infima": { - "version": "0.2.0-alpha.37" + "version": "0.2.0-alpha.37", + "resolved": "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.37.tgz", + "integrity": "sha512-4GX7Baw+/lwS4PPW/UJNY89tWSvYG1DL6baKVdpK6mC593iRgMssxNtORMTFArLPJ/A/lzsGhRmx+z6MaMxj0Q==" }, "inflight": { "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "requires": { "once": "^1.3.0", "wrappy": "1" } }, "inherits": { - "version": "2.0.4" + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "ini": { - "version": "1.3.8" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", + 
"integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==" }, "inline-style-parser": { - "version": "0.1.1" + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", + "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==" }, "internal-slot": { "version": "1.0.3", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", + "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", "requires": { "get-intrinsic": "^1.1.0", "has": "^1.0.3", @@ -25101,22 +31532,32 @@ } }, "interpret": { - "version": "1.4.0" + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==" }, "invariant": { "version": "2.2.4", + "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", + "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", "requires": { "loose-envify": "^1.0.0" } }, "ipaddr.js": { - "version": "2.0.1" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.0.1.tgz", + "integrity": "sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng==" }, "is-alphabetical": { - "version": "1.0.4" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", + "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==" }, "is-alphanumerical": { "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", + "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", "requires": { "is-alphabetical": "^1.0.0", "is-decimal": "^1.0.0" @@ -25124,101 +31565,139 @@ }, "is-arguments": { "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", "requires": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" } }, "is-arrayish": { - "version": "0.2.1" + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==" }, "is-bigint": { "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", "requires": { "has-bigints": "^1.0.1" } }, "is-binary-path": { "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", "requires": { "binary-extensions": "^2.0.0" } }, "is-boolean-object": { "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", "requires": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" } }, "is-buffer": { 
- "version": "2.0.5" + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", + "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==" }, "is-callable": { - "version": "1.2.4" + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==" }, "is-ci": { - "version": "2.0.0", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", + "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", + "dev": true, "requires": { - "ci-info": "^2.0.0" - }, - "dependencies": { - "ci-info": { - "version": "2.0.0" - } + "ci-info": "^3.2.0" } }, "is-core-module": { - "version": "2.9.0", + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.10.0.tgz", + "integrity": "sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==", "requires": { "has": "^1.0.3" } }, "is-date-object": { "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", "requires": { "has-tostringtag": "^1.0.0" } }, "is-decimal": { - "version": "1.0.4" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", + "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==" }, "is-docker": { - "version": "2.2.1" + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==" }, "is-extendable": { - "version": "0.1.1" + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" }, "is-extglob": { - "version": "2.1.1" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==" }, "is-fullwidth-code-point": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, "is-function": { - "version": "1.0.2" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-function/-/is-function-1.0.2.tgz", + "integrity": "sha512-lw7DUp0aWXYg+CBCN+JKkcE0Q2RayZnSvnZBlwgxHBQhqt5pZNVy4Ri7H9GmmXkdu7LUthszM+Tor1u/2iBcpQ==" }, "is-generator-fn": { "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", "dev": true }, "is-generator-function": { "version": "1.0.10", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", 
"requires": { "has-tostringtag": "^1.0.0" } }, "is-glob": { "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "requires": { "is-extglob": "^2.1.1" } }, "is-hexadecimal": { - "version": "1.0.4" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", + "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==" }, "is-installed-globally": { "version": "0.4.0", + "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", + "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", "requires": { "global-dirs": "^3.0.0", "is-path-inside": "^3.0.2" @@ -25226,131 +31705,203 @@ }, "is-nan": { "version": "1.3.2", + "resolved": "https://registry.npmjs.org/is-nan/-/is-nan-1.3.2.tgz", + "integrity": "sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==", "requires": { "call-bind": "^1.0.0", "define-properties": "^1.1.3" } }, "is-negative-zero": { - "version": "2.0.2" + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==" }, "is-npm": { - "version": "5.0.0" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz", + "integrity": "sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA==" }, "is-number": { - "version": "7.0.0" + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" }, "is-number-object": { "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", "requires": { "has-tostringtag": "^1.0.0" } }, "is-obj": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", + "integrity": "sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==" }, "is-path-cwd": { - "version": "2.2.0" + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", + "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==" }, "is-path-inside": { - "version": "3.0.3" + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==" }, "is-plain-obj": { - "version": "2.1.0" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==" }, "is-plain-object": { "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", "requires": { 
"isobject": "^3.0.1" } }, "is-potential-custom-element-name": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", "dev": true }, "is-regex": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", "requires": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" } }, "is-regexp": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", + "integrity": "sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==" }, "is-root": { - "version": "2.1.0" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz", + "integrity": "sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==" }, "is-shared-array-buffer": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", "requires": { "call-bind": "^1.0.2" } }, "is-stream": { - "version": "2.0.1" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==" }, "is-string": { "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", "requires": { "has-tostringtag": "^1.0.0" } }, "is-symbol": { "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "requires": { "has-symbols": "^1.0.2" } }, "is-typed-array": { - "version": "1.1.8", + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.9.tgz", + "integrity": "sha512-kfrlnTTn8pZkfpJMUgYD7YZ3qzeJgWUn8XfVYBARc4wnmNOmLbmuuaAs3q5fvB0UJOn6yHAKaGTPM7d6ezoD/A==", "requires": { "available-typed-arrays": "^1.0.5", "call-bind": "^1.0.2", - "es-abstract": "^1.18.5", - "foreach": "^2.0.5", + "es-abstract": "^1.20.0", + "for-each": "^0.3.3", "has-tostringtag": "^1.0.0" } }, "is-typedarray": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" + }, + "is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true }, "is-weakref": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", "requires": { "call-bind": "^1.0.2" } }, "is-whitespace-character": { - "version": "1.0.4" + 
"version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz", + "integrity": "sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w==" }, "is-word-character": { - "version": "1.0.4" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz", + "integrity": "sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA==" }, "is-wsl": { "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", "requires": { "is-docker": "^2.0.0" } }, "is-yarn-global": { - "version": "0.3.0" + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz", + "integrity": "sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==" }, "isarray": { - "version": "0.0.1" + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" }, "isexe": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, "isobject": { - "version": "3.0.1" + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==" + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==", + "dev": true }, "istanbul-lib-coverage": { "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", + "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", "dev": true }, "istanbul-lib-instrument": { - "version": "5.2.0", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", "dev": true, "requires": { "@babel/core": "^7.12.3", @@ -25362,12 +31913,16 @@ "dependencies": { "semver": { "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true } } }, "istanbul-lib-report": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", "dev": true, "requires": { "istanbul-lib-coverage": "^3.0.0", @@ -25377,10 +31932,14 @@ "dependencies": { "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, "supports-color": { "version": "7.2.0", + 
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -25390,6 +31949,8 @@ }, "istanbul-lib-source-maps": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", "dev": true, "requires": { "debug": "^4.1.1", @@ -25398,7 +31959,9 @@ } }, "istanbul-reports": { - "version": "3.1.4", + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz", + "integrity": "sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==", "dev": true, "requires": { "html-escaper": "^2.0.0", @@ -25407,6 +31970,8 @@ }, "jest": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest/-/jest-27.5.1.tgz", + "integrity": "sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ==", "dev": true, "requires": { "@jest/core": "^27.5.1", @@ -25416,15 +31981,50 @@ }, "jest-changed-files": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-27.5.1.tgz", + "integrity": "sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw==", "dev": true, "requires": { "@jest/types": "^27.5.1", "execa": "^5.0.0", "throat": "^6.0.1" + }, + "dependencies": { + "execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + } + }, + "get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true + }, + "human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true + } } }, "jest-circus": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-27.5.1.tgz", + "integrity": "sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw==", "dev": true, "requires": { "@jest/environment": "^27.5.1", @@ -25450,36 +32050,127 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": 
"^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "diff-sequences": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": "sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", + "dev": true + }, + "expect": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-diff": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", "dev": true, "requires": { - "color-convert": "^2.0.1" + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" } }, - "chalk": { - "version": "4.1.2", + "jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true + }, + "jest-matcher-utils": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", "dev": true, "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" } }, - "color-convert": { - "version": "2.0.1", + "jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", "dev": true, "requires": { - "color-name": "~1.1.4" + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + 
"@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" } }, - "color-name": { - "version": "1.1.4", - "dev": true - }, - "has-flag": { - "version": "4.0.0", - "dev": true - }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -25489,6 +32180,8 @@ }, "jest-cli": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-27.5.1.tgz", + "integrity": "sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw==", "dev": true, "requires": { "@jest/core": "^27.5.1", @@ -25507,6 +32200,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -25514,6 +32209,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -25522,6 +32219,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -25529,14 +32228,34 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -25546,6 +32265,8 @@ }, "jest-config": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-27.5.1.tgz", + "integrity": "sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA==", "dev": true, "requires": { "@babel/core": "^7.8.0", @@ -25576,6 +32297,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -25583,6 +32306,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -25591,6 +32316,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -25598,14 +32325,40 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -25614,17 +32367,21 @@ } }, "jest-diff": { - "version": "27.5.1", + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.2.1.tgz", + "integrity": "sha512-gfh/SMNlQmP3MOUgdzxPOd4XETDJifADpT937fN1iUGz+9DgOu2eUPHH25JDkLVcLwwqxv3GzVyK4VBUr9fjfA==", "dev": true, "requires": { "chalk": "^4.0.0", - "diff-sequences": "^27.5.1", - "jest-get-type": "^27.5.1", - "pretty-format": "^27.5.1" + "diff-sequences": "^29.2.0", + "jest-get-type": "^29.2.0", + "pretty-format": "^29.2.1" }, "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -25632,6 +32389,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -25640,6 +32399,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": 
"sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -25647,14 +32408,39 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "pretty-format": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.2.1.tgz", + "integrity": "sha512-Y41Sa4aLCtKAXvwuIpTvcFBkyeYp2gdFWzXGA+ZNES3VwURIB165XO/z7CjETwzCCS53MjW/rLMyyqEnTtaOfA==", + "dev": true, + "requires": { + "@jest/schemas": "^29.0.0", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true + } + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -25664,6 +32450,8 @@ }, "jest-docblock": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-27.5.1.tgz", + "integrity": "sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ==", "dev": true, "requires": { "detect-newline": "^3.0.0" @@ -25671,6 +32459,8 @@ }, "jest-each": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-27.5.1.tgz", + "integrity": "sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ==", "dev": true, "requires": { "@jest/types": "^27.5.1", @@ -25682,6 +32472,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -25689,6 +32481,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -25697,6 +32491,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -25704,14 +32500,40 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", "dev": true }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -25721,6 +32543,8 @@ }, "jest-environment-jsdom": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz", + "integrity": "sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw==", "dev": true, "requires": { "@jest/environment": "^27.5.1", @@ -25730,10 +32554,77 @@ "jest-mock": "^27.5.1", "jest-util": "^27.5.1", "jsdom": "^16.6.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "jest-environment-node": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-27.5.1.tgz", + "integrity": "sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw==", "dev": true, "requires": { "@jest/environment": "^27.5.1", @@ -25742,14 +32633,83 @@ "@types/node": "*", "jest-mock": "^27.5.1", "jest-util": "^27.5.1" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "jest-get-type": { - "version": "27.5.1", + "version": "29.2.0", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.2.0.tgz", + "integrity": "sha512-uXNJlg8hKFEnDgFsrCjznB+sTxdkuqiCL6zMgA75qEbAJjJYTs9XPrvDctrEig2GDow22T/LvHgO57iJhXB/UA==", "dev": true }, "jest-haste-map": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.5.1.tgz", + "integrity": "sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng==", "dev": true, "requires": { "@jest/types": "^27.5.1", @@ -25765,10 +32725,77 @@ "jest-worker": "^27.5.1", "micromatch": "^4.0.4", "walker": "^1.0.7" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "jest-jasmine2": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz", + "integrity": "sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ==", "dev": true, "requires": { "@jest/environment": "^27.5.1", @@ -25792,6 +32819,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -25799,6 +32828,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -25807,6 +32838,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -25814,14 +32847,99 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "diff-sequences": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": "sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", "dev": true }, + "expect": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" + } + }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-diff": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + } + }, + "jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", "dev": true }, + "jest-matcher-utils": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + } + }, + "jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -25831,24 +32949,38 @@ }, "jest-leak-detector": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz", + "integrity": 
"sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ==", "dev": true, "requires": { "jest-get-type": "^27.5.1", "pretty-format": "^27.5.1" + }, + "dependencies": { + "jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", + "dev": true + } } }, "jest-matcher-utils": { - "version": "27.5.1", + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.2.1.tgz", + "integrity": "sha512-hUTBh7H/Mnb6GTpihbLh8uF5rjAMdekfW/oZNXUMAXi7bbmym2HiRpzgqf/zzkjgejMrVAkPdVSQj+32enlUww==", "dev": true, "requires": { "chalk": "^4.0.0", - "jest-diff": "^27.5.1", - "jest-get-type": "^27.5.1", - "pretty-format": "^27.5.1" + "jest-diff": "^29.2.1", + "jest-get-type": "^29.2.0", + "pretty-format": "^29.2.1" }, "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -25856,6 +32988,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -25864,6 +32998,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -25871,14 +33007,39 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "pretty-format": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.2.1.tgz", + "integrity": "sha512-Y41Sa4aLCtKAXvwuIpTvcFBkyeYp2gdFWzXGA+ZNES3VwURIB165XO/z7CjETwzCCS53MjW/rLMyyqEnTtaOfA==", + "dev": true, + "requires": { + "@jest/schemas": "^29.0.0", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true + } + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -25887,22 +33048,49 @@ } }, "jest-message-util": { - "version": "27.5.1", + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.2.1.tgz", + "integrity": 
"sha512-Dx5nEjw9V8C1/Yj10S/8ivA8F439VS8vTq1L7hEgwHFn9ovSKNpYW/kwNh7UglaEgXO42XxzKJB+2x0nSglFVw==", "dev": true, "requires": { "@babel/code-frame": "^7.12.13", - "@jest/types": "^27.5.1", + "@jest/types": "^29.2.1", "@types/stack-utils": "^2.0.0", "chalk": "^4.0.0", "graceful-fs": "^4.2.9", "micromatch": "^4.0.4", - "pretty-format": "^27.5.1", + "pretty-format": "^29.2.1", "slash": "^3.0.0", "stack-utils": "^2.0.3" }, "dependencies": { + "@jest/types": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.2.1.tgz", + "integrity": "sha512-O/QNDQODLnINEPAI0cl9U6zUIDXEWXt6IC1o2N2QENuos7hlGUIthlKyV4p6ki3TvXFX071blj8HUhgLGquPjw==", + "dev": true, + "requires": { + "@jest/schemas": "^29.0.0", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + } + }, + "@types/yargs": { + "version": "17.0.13", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.13.tgz", + "integrity": "sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg==", + "dev": true, + "requires": { + "@types/yargs-parser": "*" + } + }, "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -25910,6 +33098,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -25918,6 +33108,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -25925,14 +33117,39 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "pretty-format": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.2.1.tgz", + "integrity": "sha512-Y41Sa4aLCtKAXvwuIpTvcFBkyeYp2gdFWzXGA+ZNES3VwURIB165XO/z7CjETwzCCS53MjW/rLMyyqEnTtaOfA==", + "dev": true, + "requires": { + "@jest/schemas": "^29.0.0", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true + } + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -25942,6 +33159,8 @@ }, 
"jest-mock": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-27.5.1.tgz", + "integrity": "sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og==", "dev": true, "requires": { "@jest/types": "^27.5.1", @@ -25950,15 +33169,21 @@ }, "jest-pnp-resolver": { "version": "1.2.2", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz", + "integrity": "sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==", "dev": true, "requires": {} }, "jest-regex-util": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.5.1.tgz", + "integrity": "sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg==", "dev": true }, "jest-resolve": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-27.5.1.tgz", + "integrity": "sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw==", "dev": true, "requires": { "@jest/types": "^27.5.1", @@ -25975,6 +33200,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -25982,6 +33209,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -25990,6 +33219,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -25997,14 +33228,34 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -26014,6 +33265,8 @@ }, "jest-resolve-dependencies": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz", + "integrity": 
"sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg==", "dev": true, "requires": { "@jest/types": "^27.5.1", @@ -26023,6 +33276,8 @@ }, "jest-runner": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-27.5.1.tgz", + "integrity": "sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ==", "dev": true, "requires": { "@jest/console": "^27.5.1", @@ -26050,6 +33305,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -26057,6 +33314,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -26065,6 +33324,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -26072,14 +33333,51 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -26089,6 +33387,8 @@ }, "jest-runtime": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-27.5.1.tgz", + "integrity": "sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A==", "dev": true, "requires": { "@jest/environment": "^27.5.1", @@ -26117,6 +33417,8 @@ "dependencies": { "ansi-styles": { "version": 
"4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -26124,6 +33426,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -26132,6 +33436,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -26139,14 +33445,80 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + } + }, + "get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", "dev": true }, - "has-flag": { - "version": "4.0.0", - "dev": true + "jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -26156,6 +33528,8 @@ }, "jest-serializer": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-27.5.1.tgz", + "integrity": "sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w==", "dev": true, "requires": { "@types/node": "*", @@ -26164,6 +33538,8 @@ }, "jest-snapshot": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-27.5.1.tgz", + "integrity": "sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA==", "dev": true, "requires": { "@babel/core": "^7.7.2", @@ -26192,6 +33568,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -26199,6 +33577,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -26207,6 +33587,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -26214,14 +33596,99 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, + "diff-sequences": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": "sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", + "dev": true + }, + "expect": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" + } + }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-diff": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + } + }, + "jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", "dev": true }, + "jest-matcher-utils": { + 
"version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", + "dev": true, + "requires": { + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + } + }, + "jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + } + }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -26230,10 +33697,12 @@ } }, "jest-util": { - "version": "27.5.1", + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.2.1.tgz", + "integrity": "sha512-P5VWDj25r7kj7kl4pN2rG/RN2c1TLfYYYZYULnS/35nFDjBai+hBeo3MDrYZS7p6IoY3YHZnt2vq4L6mKnLk0g==", "dev": true, "requires": { - "@jest/types": "^27.5.1", + "@jest/types": "^29.2.1", "@types/node": "*", "chalk": "^4.0.0", "ci-info": "^3.2.0", @@ -26241,8 +33710,33 @@ "picomatch": "^2.2.3" }, "dependencies": { + "@jest/types": { + "version": "29.2.1", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.2.1.tgz", + "integrity": "sha512-O/QNDQODLnINEPAI0cl9U6zUIDXEWXt6IC1o2N2QENuos7hlGUIthlKyV4p6ki3TvXFX071blj8HUhgLGquPjw==", + "dev": true, + "requires": { + "@jest/schemas": "^29.0.0", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + } + }, + "@types/yargs": { + "version": "17.0.13", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.13.tgz", + "integrity": "sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg==", + "dev": true, + "requires": { + "@types/yargs-parser": "*" + } + }, "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -26250,6 +33744,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -26258,6 +33754,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -26265,14 +33763,20 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -26282,6 +33786,8 @@ }, "jest-validate": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-27.5.1.tgz", + "integrity": "sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ==", "dev": true, "requires": { "@jest/types": "^27.5.1", @@ -26294,6 +33800,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -26301,6 +33809,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -26309,6 +33819,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -26316,14 +33828,26 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", "dev": true }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -26333,6 +33857,8 @@ }, "jest-watcher": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-27.5.1.tgz", + "integrity": 
"sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw==", "dev": true, "requires": { "@jest/test-result": "^27.5.1", @@ -26346,6 +33872,8 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { "color-convert": "^2.0.1" @@ -26353,6 +33881,8 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { "ansi-styles": "^4.1.0", @@ -26361,6 +33891,8 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { "color-name": "~1.1.4" @@ -26368,14 +33900,34 @@ }, "color-name": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, + "jest-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", + "dev": true, + "requires": { + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + } + }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -26385,6 +33937,8 @@ }, "jest-worker": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", "requires": { "@types/node": "*", "merge-stream": "^2.0.0", @@ -26392,10 +33946,14 @@ }, "dependencies": { "has-flag": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "supports-color": { "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "requires": { "has-flag": "^4.0.0" } @@ -26403,7 +33961,9 @@ } }, "joi": { - "version": "17.6.0", + "version": "17.6.3", + "resolved": "https://registry.npmjs.org/joi/-/joi-17.6.3.tgz", + "integrity": "sha512-YlQsIaS9MHYekzf1Qe11LjTkNzx9qhYluK3172z38RxYoAUf82XMX1p1DG1H4Wtk2ED/vPdSn9OggqtDu+aTow==", "requires": { "@hapi/hoek": "^9.0.0", "@hapi/topo": "^5.0.0", @@ -26413,22 +33973,38 @@ } }, 
"jpeg-js": { - "version": "0.4.2" + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.4.4.tgz", + "integrity": "sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg==" }, "js-levenshtein": { - "version": "1.1.6" + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/js-levenshtein/-/js-levenshtein-1.1.6.tgz", + "integrity": "sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==" }, "js-tokens": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, "js-yaml": { "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "requires": { "argparse": "^2.0.1" } }, + "jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", + "dev": true + }, "jsdom": { "version": "16.7.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz", + "integrity": "sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw==", "dev": true, "requires": { "abab": "^2.0.5", @@ -26458,65 +34034,178 @@ "whatwg-url": "^8.5.0", "ws": "^7.4.6", "xml-name-validator": "^3.0.0" + }, + "dependencies": { + "form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", + "dev": true + }, + "tough-cookie": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.2.tgz", + "integrity": "sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ==", + "dev": true, + "requires": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" + } + }, + "universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "dev": true + } } }, "jsesc": { - "version": "2.5.2" + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==" }, "json-buffer": { - "version": "3.0.0" - }, - "json-parse-better-errors": { - "version": "1.0.2" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", + "integrity": "sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==" }, "json-parse-even-better-errors": { - "version": "2.3.1" + "version": "2.3.1", + "resolved": 
"https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" }, "json-pointer": { "version": "0.6.2", + "resolved": "https://registry.npmjs.org/json-pointer/-/json-pointer-0.6.2.tgz", + "integrity": "sha512-vLWcKbOaXlO+jvRy4qNd+TI1QUPZzfJj1tpJ3vAXDych5XJf93ftpUKe5pKCrzyIIwgBJcOcCVRUfqQP25afBw==", "requires": { "foreach": "^2.0.4" } }, + "json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "dev": true + }, "json-schema-traverse": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true }, "json5": { - "version": "2.2.1" + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", + "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==" + }, + "jsonc-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", + "dev": true }, "jsonfile": { "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", "requires": { "graceful-fs": "^4.1.6", "universalify": "^2.0.0" } }, + "jsprim": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", + "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", + "dev": true, + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + } + }, + "katex": { + "version": "0.13.24", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.13.24.tgz", + "integrity": "sha512-jZxYuKCma3VS5UuxOx/rFV1QyGSl3Uy/i0kTJF3HgQ5xMinCQVF8Zd4bMY/9aI9b9A2pjIBOsjSSm68ykTAr8w==", + "requires": { + "commander": "^8.0.0" + }, + "dependencies": { + "commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==" + } + } + }, "keyv": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz", + "integrity": "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==", "requires": { "json-buffer": "3.0.0" } }, "kind-of": { - "version": "6.0.3" + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" }, "kleur": { - "version": "3.0.3" + "version": "3.0.3", + "resolved": 
"https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==" }, "klona": { - "version": "2.0.5" + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/klona/-/klona-2.0.5.tgz", + "integrity": "sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ==" }, "latest-version": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz", + "integrity": "sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==", "requires": { "package-json": "^6.3.0" } }, + "lazy-ass": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", + "integrity": "sha512-cc8oEVoctTvsFZ/Oje/kGnHbpWHYBe8IAJe4C0QNc3t8uM/0Y8+erSz/7Y1ALuXTEZTMvxXwO6YbX1ey3ujiZw==", + "dev": true + }, "leven": { - "version": "3.1.0" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==" }, "levn": { "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", "dev": true, "requires": { "prelude-ls": "~1.1.2", @@ -26524,13 +34213,89 @@ } }, "lilconfig": { - "version": "2.0.5" + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.6.tgz", + "integrity": "sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg==" }, "lines-and-columns": { - "version": "1.2.4" + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" + }, + "listr2": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz", + "integrity": "sha512-TyWI8G99GX9GjE54cJ+RrNMcIFBfwMPxc3XTFiAYGN4s10hWROGtOg7+O6u6LE3mNkyld7RSLE6nrKBvTfcs3g==", + "dev": true, + "requires": { + "cli-truncate": "^2.1.0", + "colorette": "^2.0.16", + "log-update": "^4.0.0", + "p-map": "^4.0.0", + "rfdc": "^1.3.0", + "rxjs": "^7.5.1", + "through": "^2.3.8", + "wrap-ansi": "^7.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", 
+ "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + } + } }, "load-bmfont": { "version": "1.4.1", + "resolved": "https://registry.npmjs.org/load-bmfont/-/load-bmfont-1.4.1.tgz", + "integrity": "sha512-8UyQoYmdRDy81Brz6aLAUhfZLwr5zV0L3taTQ4hju7m6biuwiWiJXjPhBJxbUQJA8PrkvJ/7Enqmwk2sM14soA==", "requires": { "buffer-equal": "0.0.1", "mime": "^1.3.4", @@ -26543,10 +34308,14 @@ } }, "loader-runner": { - "version": "4.3.0" + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz", + "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==" }, "loader-utils": { "version": "2.0.2", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.2.tgz", + "integrity": "sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A==", "requires": { "big.js": "^5.2.2", "emojis-list": "^3.0.0", @@ -26555,127 +34324,274 @@ }, "locate-path": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "requires": { "p-locate": "^4.1.0" } }, - "lodash": { - "version": "4.17.21" - }, - "lodash.assignin": { - "version": "4.2.0" - }, - "lodash.bind": { - "version": "4.2.1" - }, - "lodash.curry": { - "version": "4.1.1" - }, - "lodash.debounce": { - "version": "4.0.8" - }, - "lodash.defaults": { - "version": "4.2.0" - }, - "lodash.filter": { - "version": "4.6.0" - }, - "lodash.flatten": { - "version": "4.4.0" - }, - "lodash.flow": { - "version": "3.5.0" - }, - "lodash.foreach": { - "version": "4.5.0" - }, - "lodash.isequal": { - "version": "4.5.0" - }, - "lodash.map": { - "version": "4.6.0" - }, - "lodash.memoize": { - "version": "4.1.2" - }, - "lodash.merge": { - "version": "4.6.2" - }, - "lodash.pick": { - "version": "4.4.0" - }, - "lodash.reduce": { - "version": "4.6.0" - }, - "lodash.reject": { - "version": "4.6.0" - }, - "lodash.some": { - "version": "4.6.0" - }, - "lodash.sortby": { - "version": "4.7.0" - }, - "lodash.uniq": { - "version": "4.5.0" - }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "lodash.curry": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz", + "integrity": "sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA==" + }, + "lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": 
"sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==" + }, + "lodash.flow": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/lodash.flow/-/lodash.flow-3.5.0.tgz", + "integrity": "sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw==" + }, + "lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==" + }, + "lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==" + }, + "lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "dev": true + }, + "lodash.uniq": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", + "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==" + }, + "log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "requires": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "log-update": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", + "integrity": 
"sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", + "dev": true, + "requires": { + "ansi-escapes": "^4.3.0", + "cli-cursor": "^3.1.0", + "slice-ansi": "^4.0.0", + "wrap-ansi": "^6.2.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + } + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + } + } + }, "loose-envify": { "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", "requires": { "js-tokens": "^3.0.0 || ^4.0.0" } }, "lower-case": { "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", "requires": { "tslib": "^2.0.3" } }, "lowercase-keys": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", + "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==" }, "lru-cache": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "requires": { "yallist": "^4.0.0" } }, "lunr": { - 
"version": "2.3.9" + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz", + "integrity": "sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==" }, "lz-string": { "version": "1.4.4", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.4.4.tgz", + "integrity": "sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ==", "dev": true }, "make-dir": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "requires": { "semver": "^6.0.0" }, "dependencies": { "semver": { - "version": "6.3.0" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "makeerror": { "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", "dev": true, "requires": { "tmpl": "1.0.5" } }, "mark.js": { - "version": "8.11.1" + "version": "8.11.1", + "resolved": "https://registry.npmjs.org/mark.js/-/mark.js-8.11.1.tgz", + "integrity": "sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==" }, "markdown-escapes": { - "version": "1.0.4" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz", + "integrity": "sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg==" }, "marked": { - "version": "4.0.15" + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/marked/-/marked-4.1.1.tgz", + "integrity": "sha512-0cNMnTcUJPxbA6uWmCmjWz4NJRe/0Xfk2NhXCUHjew9qJzFN20krFnsUe7QynwqOwa5m1fZ4UDg0ycKFVC0ccw==" }, "md5.js": { "version": "1.3.5", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", + "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", "requires": { "hash-base": "^3.0.0", "inherits": "^2.0.1", @@ -26684,18 +34600,24 @@ }, "mdast-squeeze-paragraphs": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-squeeze-paragraphs/-/mdast-squeeze-paragraphs-4.0.0.tgz", + "integrity": "sha512-zxdPn69hkQ1rm4J+2Cs2j6wDEv7O17TfXTJ33tl/+JPIoEmtV9t2ZzBM5LPHE8QlHsmVD8t3vPKCyY3oH+H8MQ==", "requires": { "unist-util-remove": "^2.0.0" } }, "mdast-util-definitions": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz", + "integrity": "sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==", "requires": { "unist-util-visit": "^2.0.0" } }, "mdast-util-to-hast": { "version": "10.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz", + "integrity": "sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA==", "requires": { "@types/mdast": "^3.0.0", "@types/unist": "^2.0.0", @@ -26708,37 +34630,57 @@ } }, "mdast-util-to-string": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", + "integrity": 
"sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==" }, "mdn-data": { - "version": "2.0.4" + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", + "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" }, "mdurl": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" }, "media-typer": { - "version": "0.3.0" + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==" }, "memfs": { - "version": "3.4.1", + "version": "3.4.7", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.4.7.tgz", + "integrity": "sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw==", "requires": { - "fs-monkey": "1.0.3" + "fs-monkey": "^1.0.3" } }, "merge-descriptors": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" }, "merge-stream": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" }, "merge2": { - "version": "1.4.1" + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" }, "methods": { - "version": "1.1.2" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" }, "micromatch": { "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", "requires": { "braces": "^3.0.2", "picomatch": "^2.3.1" @@ -26746,59 +34688,74 @@ }, "miller-rabin": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", + "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", "requires": { "bn.js": "^4.0.0", "brorand": "^1.0.1" }, "dependencies": { "bn.js": { - "version": "4.12.0" + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" } } }, "mime": { - "version": "1.6.0" + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" }, "mime-db": { - "version": "1.52.0" + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" }, "mime-types": { "version": "2.1.35", + 
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "requires": { "mime-db": "1.52.0" } }, "mimic-fn": { - "version": "2.1.0" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==" }, "mimic-response": { - "version": "3.1.0" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==" }, "min-document": { "version": "2.19.0", + "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", + "integrity": "sha512-9Wy1B3m3f66bPPmU5hdA4DR4PB2OfDU/+GS3yAB7IQozE3tqXaVv2zOjgla7MEGSRv95+ILmOuvhLkOK6wJtCQ==", "requires": { "dom-walk": "^0.1.0" } }, "min-indent": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", "dev": true }, - "mini-create-react-context": { - "version": "0.4.1", - "requires": { - "@babel/runtime": "^7.12.1", - "tiny-warning": "^1.0.3" - } - }, "mini-css-extract-plugin": { - "version": "2.6.0", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz", + "integrity": "sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg==", "requires": { "schema-utils": "^4.0.0" }, "dependencies": { "ajv": { "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -26808,12 +34765,16 @@ }, "ajv-keywords": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "requires": { "fast-deep-equal": "^3.1.3" } }, "schema-utils": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "requires": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", @@ -26824,107 +34785,157 @@ } }, "minimalistic-assert": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" }, "minimalistic-crypto-utils": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", + "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==" }, "minimatch": { "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "requires": { "brace-expansion": "^1.1.7" } }, "minimist": { - "version": 
"1.2.6" + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz", + "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==" }, "mkdirp": { "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", "requires": { "minimist": "^1.2.6" } }, "mkdirp-classic": { - "version": "0.5.3" + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" }, "mobx": { - "version": "6.5.0" + "version": "6.6.2", + "resolved": "https://registry.npmjs.org/mobx/-/mobx-6.6.2.tgz", + "integrity": "sha512-IOpS0bf3+hXIhDIy+CmlNMBfFpAbHS0aVHcNC+xH/TFYEKIIVDKNYRh9eKlXuVfJ1iRKAp0cRVmO145CyJAMVQ==" }, "mobx-react": { - "version": "7.3.0", + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/mobx-react/-/mobx-react-7.5.3.tgz", + "integrity": "sha512-+ltotliKt4Bjn3d8taZH/VFAcRUbaASvsM8/QSvmHXcZ++RZwaFtjl9JkIosy1byaJGEDS3EFFx2InRm2VaSUw==", "requires": { - "mobx-react-lite": "^3.3.0" + "mobx-react-lite": "^3.4.0" } }, "mobx-react-lite": { - "version": "3.3.0", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/mobx-react-lite/-/mobx-react-lite-3.4.0.tgz", + "integrity": "sha512-bRuZp3C0itgLKHu/VNxi66DN/XVkQG7xtoBVWxpvC5FhAqbOCP21+nPhULjnzEqd7xBMybp6KwytdUpZKEgpIQ==", "requires": {} }, "mrmime": { - "version": "1.0.0" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz", + "integrity": "sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw==" }, "ms": { - "version": "2.1.2" + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "multicast-dns": { - "version": "7.2.4", + "version": "7.2.5", + "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz", + "integrity": "sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==", "requires": { "dns-packet": "^5.2.2", "thunky": "^1.0.2" } }, "nanoid": { - "version": "3.3.3" + "version": "3.3.4", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", + "integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==" }, "napi-build-utils": { - "version": "1.0.2" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz", + "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==" }, "natural-compare": { "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, "negotiator": { - "version": "0.6.3" + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==" }, "neo-async": { - "version": "2.6.2" + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + 
"integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" }, "no-case": { "version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", "requires": { "lower-case": "^2.0.2", "tslib": "^2.0.3" } }, "node-abi": { - "version": "3.15.0", + "version": "3.26.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.26.0.tgz", + "integrity": "sha512-jRVtMFTChbi2i/jqo/i2iP9634KMe+7K1v35mIdj3Mn59i5q27ZYhn+sW6npISM/PQg7HrP2kwtRBMmh5Uvzdg==", "requires": { "semver": "^7.3.5" } }, "node-addon-api": { - "version": "4.3.0" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.0.0.tgz", + "integrity": "sha512-CvkDw2OEnme7ybCykJpVcKH+uAOLV2qLqiyla128dN9TkEWfrYmxG6C2boDe5KcNQqZF3orkqzGgOMvZ/JNekA==" }, "node-emoji": { "version": "1.11.0", + "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.11.0.tgz", + "integrity": "sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A==", "requires": { "lodash": "^4.17.21" } }, "node-fetch": { "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", "requires": { "whatwg-url": "^5.0.0" }, "dependencies": { "tr46": { - "version": "0.0.3" + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, "webidl-conversions": { - "version": "3.0.1" + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, "whatwg-url": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "requires": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" @@ -26934,19 +34945,27 @@ }, "node-fetch-h2": { "version": "2.3.0", + "resolved": "https://registry.npmjs.org/node-fetch-h2/-/node-fetch-h2-2.3.0.tgz", + "integrity": "sha512-ofRW94Ab0T4AOh5Fk8t0h8OBWrmjb0SSB20xh1H8YnPV9EJ+f5AMoYSUQ2zgJ4Iq2HAK0I2l5/Nequ8YzFS3Hg==", "requires": { "http2-client": "^1.2.5" } }, "node-forge": { - "version": "1.3.1" + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==" }, "node-int64": { "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", "dev": true }, "node-polyfill-webpack-plugin": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/node-polyfill-webpack-plugin/-/node-polyfill-webpack-plugin-1.1.4.tgz", + "integrity": "sha512-Z0XTKj1wRWO8o/Vjobsw5iOJCN+Sua3EZEUc2Ziy9CyVvmHKu6o+t4gUH9GOE0czyPR94LI6ZCV/PpcM8b5yow==", "requires": { "assert": "^2.0.0", "browserify-zlib": "^0.2.0", @@ -26972,19 +34991,36 @@ "url": "^0.11.0", "util": "^0.12.4", "vm-browserify": "^1.1.2" + }, + 
"dependencies": { + "buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + } } }, "node-readfiles": { "version": "0.2.0", + "resolved": "https://registry.npmjs.org/node-readfiles/-/node-readfiles-0.2.0.tgz", + "integrity": "sha512-SU00ZarexNlE4Rjdm83vglt5Y9yiQ+XI1XpflWlb7q7UTN1JUItm69xMeiQCTxtTfnzt+83T8Cx+vI2ED++VDA==", "requires": { "es6-promise": "^3.2.1" } }, "node-releases": { - "version": "2.0.4" + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz", + "integrity": "sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==" }, "node-vibrant": { "version": "3.1.6", + "resolved": "https://registry.npmjs.org/node-vibrant/-/node-vibrant-3.1.6.tgz", + "integrity": "sha512-Wlc/hQmBMOu6xon12ZJHS2N3M+I6J8DhrD3Yo6m5175v8sFkVIN+UjhKVRcO+fqvre89ASTpmiFEP3nPO13SwA==", "requires": { "@jimp/custom": "^0.16.1", "@jimp/plugin-resize": "^0.16.1", @@ -26996,58 +35032,66 @@ }, "dependencies": { "@types/node": { - "version": "10.17.60" + "version": "10.17.60", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz", + "integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==" } } }, "normalize-path": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" }, "normalize-range": { - "version": "0.1.2" + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==" }, "normalize-url": { - "version": "6.1.0" + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", + "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==" }, "npm-run-path": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "requires": { "path-key": "^3.0.0" } }, - "npmlog": { - "version": "4.1.2", - "requires": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, "nprogress": { - "version": "0.2.0" + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz", + "integrity": "sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA==" }, "nth-check": { - "version": "2.0.1", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", "requires": { "boolbase": "^1.0.0" } }, - "number-is-nan": { - "version": "1.0.1" - }, "nwsapi": { - "version": "2.2.0", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.2.tgz", + "integrity": "sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw==", 
"dev": true }, "oas-kit-common": { "version": "1.0.8", + "resolved": "https://registry.npmjs.org/oas-kit-common/-/oas-kit-common-1.0.8.tgz", + "integrity": "sha512-pJTS2+T0oGIwgjGpw7sIRU8RQMcUoKCDWFLdBqKB2BNmGpbBMH2sdqAaOXUg8OzonZHU0L7vfJu1mJFEiYDWOQ==", "requires": { "fast-safe-stringify": "^2.0.7" } }, "oas-linter": { "version": "3.2.2", + "resolved": "https://registry.npmjs.org/oas-linter/-/oas-linter-3.2.2.tgz", + "integrity": "sha512-KEGjPDVoU5K6swgo9hJVA/qYGlwfbFx+Kg2QB/kd7rzV5N8N5Mg6PlsoCMohVnQmo+pzJap/F610qTodKzecGQ==", "requires": { "@exodus/schemasafe": "^1.0.0-rc.2", "should": "^13.2.1", @@ -27056,6 +35100,8 @@ }, "oas-resolver": { "version": "2.5.6", + "resolved": "https://registry.npmjs.org/oas-resolver/-/oas-resolver-2.5.6.tgz", + "integrity": "sha512-Yx5PWQNZomfEhPPOphFbZKi9W93CocQj18NlD2Pa4GWZzdZpSJvYwoiuurRI7m3SpcChrnO08hkuQDL3FGsVFQ==", "requires": { "node-fetch-h2": "^2.3.0", "oas-kit-common": "^1.0.8", @@ -27064,21 +35110,68 @@ "yargs": "^17.0.1" }, "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "requires": { + "color-convert": "^2.0.1" + } + }, + "cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, "emoji-regex": { - "version": "8.0.0" + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "string-width": { "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, "yargs": { - "version": "17.4.1", + "version": "17.6.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.0.tgz", + "integrity": "sha512-8H/wTDqlSwoSnScvV2N/JHfLWOKuh5MVla9hqLjK3nsfyy6Y4kDSYSvkU5YCUEPOSnRXfIyx3Sq+B/IWudTo4g==", "requires": { - "cliui": "^7.0.2", + "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", @@ -27088,15 +35181,21 @@ } }, "yargs-parser": { - "version": "21.0.1" + "version": "21.1.1", + "resolved": 
"https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==" } } }, "oas-schema-walker": { - "version": "1.1.5" + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/oas-schema-walker/-/oas-schema-walker-1.1.5.tgz", + "integrity": "sha512-2yucenq1a9YPmeNExoUa9Qwrt9RFkjqaMAA1X+U7sbb0AqBeTIdMHky9SQQ6iN94bO5NW0W4TRYXerG+BdAvAQ==" }, "oas-validator": { "version": "5.0.8", + "resolved": "https://registry.npmjs.org/oas-validator/-/oas-validator-5.0.8.tgz", + "integrity": "sha512-cu20/HE5N5HKqVygs3dt94eYJfBi0TsZvPVXDhbXQHiEityDN+RROTleefoKRKKJ9dFAF2JBkDHgvWj0sjKGmw==", "requires": { "call-me-maybe": "^1.0.1", "oas-kit-common": "^1.0.8", @@ -27109,75 +35208,83 @@ } }, "object-assign": { - "version": "4.1.1" + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" }, "object-inspect": { - "version": "1.12.0" + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" }, "object-is": { "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz", + "integrity": "sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==", "requires": { "call-bind": "^1.0.2", "define-properties": "^1.1.3" } }, "object-keys": { - "version": "1.1.1" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" }, "object.assign": { - "version": "4.1.2", - "requires": { - "call-bind": "^1.0.0", - "define-properties": "^1.1.3", - "has-symbols": "^1.0.1", - "object-keys": "^1.1.1" - } - }, - "object.getownpropertydescriptors": { - "version": "2.1.3", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1" - } - }, - "object.values": { - "version": "1.1.5", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", + "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.1" + "define-properties": "^1.1.4", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" } }, "obuf": { - "version": "1.1.2" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", + "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" }, "omggif": { - "version": "1.0.10" + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/omggif/-/omggif-1.0.10.tgz", + "integrity": "sha512-LMJTtvgc/nugXj0Vcrrs68Mn2D1r0zf630VNtqtpI1FEO7e+O9FP4gqs9AcnBaSEeoHIPm28u6qgPR0oyEpGSw==" }, "on-finished": { "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", "requires": { "ee-first": "1.1.1" } }, "on-headers": { - "version": "1.0.2" + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" }, "once": { "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "requires": { "wrappy": "1" } }, "onetime": { "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "requires": { "mimic-fn": "^2.1.0" } }, "open": { "version": "8.4.0", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.0.tgz", + "integrity": "sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==", "requires": { "define-lazy-prop": "^2.0.0", "is-docker": "^2.1.1", @@ -27185,17 +35292,23 @@ } }, "openapi-sampler": { - "version": "1.2.3", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/openapi-sampler/-/openapi-sampler-1.3.0.tgz", + "integrity": "sha512-2QfjK1oM9Sv0q82Ae1RrUe3yfFmAyjF548+6eAeb+h/cL1Uj51TW4UezraBEvwEdzoBgfo4AaTLVFGTKj+yYDw==", "requires": { "@types/json-schema": "^7.0.7", "json-pointer": "0.6.2" } }, "opener": { - "version": "1.5.2" + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz", + "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==" }, "optionator": { "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", "dev": true, "requires": { "deep-is": "~0.1.3", @@ -27207,41 +35320,63 @@ } }, "os-browserify": { - "version": "0.3.0" + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", + "integrity": "sha512-gjcpUc3clBf9+210TRaDWbf+rZZZEshZ+DlXMRCeAjp0xhTrnQsKHypIy1J3d5hKdUzj69t708EHtU8P6bUn0A==" + }, + "ospath": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", + "integrity": "sha512-o6E5qJV5zkAbIDNhGSIlyOhScKXgQrSRMilfph0clDfM0nEnBOlKlH4sWDmG95BW/CvwNz0vmm7dJVtU2KlMiA==", + "dev": true }, "p-cancelable": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz", + "integrity": "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==" }, "p-limit": { "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "requires": { "p-try": "^2.0.0" } }, "p-locate": { "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "requires": { "p-limit": "^2.2.0" } }, "p-map": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", "requires": { "aggregate-error": "^3.0.0" } }, "p-retry": { "version": "4.6.2", + "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz", + "integrity": 
"sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==", "requires": { "@types/retry": "0.12.0", "retry": "^0.13.1" } }, "p-try": { - "version": "2.2.0" + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" }, "package-json": { "version": "6.5.0", + "resolved": "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz", + "integrity": "sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==", "requires": { "got": "^9.6.0", "registry-auth-token": "^4.0.0", @@ -27250,15 +35385,21 @@ }, "dependencies": { "semver": { - "version": "6.3.0" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "pako": { - "version": "1.0.11" + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==" }, "param-case": { "version": "3.0.4", + "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", + "integrity": "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==", "requires": { "dot-case": "^3.0.4", "tslib": "^2.0.3" @@ -27266,12 +35407,16 @@ }, "parent-module": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "requires": { "callsites": "^3.0.0" } }, "parse-asn1": { "version": "5.1.6", + "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz", + "integrity": "sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==", "requires": { "asn1.js": "^5.2.0", "browserify-aes": "^1.0.0", @@ -27281,13 +35426,19 @@ } }, "parse-bmfont-ascii": { - "version": "1.0.6" + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/parse-bmfont-ascii/-/parse-bmfont-ascii-1.0.6.tgz", + "integrity": "sha512-U4RrVsUFCleIOBsIGYOMKjn9PavsGOXxbvYGtMOEfnId0SVNsgehXh1DxUdVPLoxd5mvcEtvmKs2Mmf0Mpa1ZA==" }, "parse-bmfont-binary": { - "version": "1.0.6" + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/parse-bmfont-binary/-/parse-bmfont-binary-1.0.6.tgz", + "integrity": "sha512-GxmsRea0wdGdYthjuUeWTMWPqm2+FAd4GI8vCvhgJsFnoGhTrLhXDDupwTo7rXVAgaLIGoVHDZS9p/5XbSqeWA==" }, "parse-bmfont-xml": { "version": "1.1.4", + "resolved": "https://registry.npmjs.org/parse-bmfont-xml/-/parse-bmfont-xml-1.1.4.tgz", + "integrity": "sha512-bjnliEOmGv3y1aMEfREMBJ9tfL3WR0i0CKPj61DnSLaoxWR3nLrsQrEbCId/8rF4NyRF0cCqisSVXyQYWM+mCQ==", "requires": { "xml-parse-from-string": "^1.0.0", "xml2js": "^0.4.5" @@ -27295,6 +35446,8 @@ }, "parse-entities": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", + "integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", "requires": { "character-entities": "^1.0.0", "character-entities-legacy": "^1.0.0", @@ -27305,10 +35458,14 @@ } }, "parse-headers": { - "version": "2.0.5" + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/parse-headers/-/parse-headers-2.0.5.tgz", + "integrity": 
"sha512-ft3iAoLOB/MlwbNXgzy43SWGP6sQki2jQvAyBg/zDFAgr9bfNWZIUj42Kw2eJIl8kEi4PbgE6U1Zau/HwI75HA==" }, "parse-json": { "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", "requires": { "@babel/code-frame": "^7.0.0", "error-ex": "^1.3.1", @@ -27317,56 +35474,88 @@ } }, "parse-numeric-range": { - "version": "1.3.0" + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz", + "integrity": "sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ==" }, "parse5": { - "version": "6.0.1" + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.1.tgz", + "integrity": "sha512-kwpuwzB+px5WUg9pyK0IcK/shltJN5/OVhQagxhCQNtT9Y9QRZqNY2e1cmbu/paRh5LMnz/oVTVLBpjFmMZhSg==", + "requires": { + "entities": "^4.4.0" + } }, "parse5-htmlparser2-tree-adapter": { - "version": "6.0.1", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz", + "integrity": "sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==", "requires": { - "parse5": "^6.0.1" + "domhandler": "^5.0.2", + "parse5": "^7.0.0" } }, "parseurl": { - "version": "1.3.3" + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" }, "pascal-case": { "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", + "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", "requires": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "path-browserify": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==" }, "path-exists": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" }, "path-is-absolute": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" }, "path-is-inside": { - "version": "1.0.2" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", + "integrity": "sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==" }, "path-key": { - "version": "3.1.1" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" }, "path-parse": { - "version": "1.0.7" + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" }, "path-to-regexp": { "version": "1.8.0", + "resolved": 
"https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", + "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", "requires": { "isarray": "0.0.1" } }, "path-type": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" }, "pbkdf2": { "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", + "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", "requires": { "create-hash": "^1.1.2", "create-hmac": "^1.1.4", @@ -27375,48 +35564,86 @@ "sha.js": "^2.4.8" } }, + "pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", + "dev": true + }, "perfect-scrollbar": { - "version": "1.5.5" + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/perfect-scrollbar/-/perfect-scrollbar-1.5.5.tgz", + "integrity": "sha512-dzalfutyP3e/FOpdlhVryN4AJ5XDVauVWxybSkLZmakFE2sS3y3pc4JnSprw8tGmHvkaG5Edr5T7LBTZ+WWU2g==" + }, + "performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==", + "dev": true }, "phin": { - "version": "2.9.3" + "version": "2.9.3", + "resolved": "https://registry.npmjs.org/phin/-/phin-2.9.3.tgz", + "integrity": "sha512-CzFr90qM24ju5f88quFC/6qohjC144rehe5n6DH900lgXmUe86+xCKc10ev56gRKC4/BkHUoG4uSiQgBiIXwDA==" }, "picocolors": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" }, "picomatch": { - "version": "2.3.1" + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==" + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true }, "pirates": { "version": "4.0.5", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", + "integrity": "sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==", "dev": true }, "pixelmatch": { "version": "4.0.2", + "resolved": "https://registry.npmjs.org/pixelmatch/-/pixelmatch-4.0.2.tgz", + "integrity": "sha512-J8B6xqiO37sU/gkcMglv6h5Jbd9xNER7aHzpfRdNmV4IbQBzBpe4l9XmbG+xPF/znacgu2jfEw+wHffaq/YkXA==", "requires": { "pngjs": "^3.0.0" } }, "pkg-dir": { "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "requires": { "find-up": "^4.0.0" } }, "pkg-up": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz", + "integrity": "sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==", "requires": { "find-up": "^3.0.0" 
}, "dependencies": { "find-up": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "requires": { "locate-path": "^3.0.0" } }, "locate-path": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "requires": { "p-locate": "^3.0.0", "path-exists": "^3.0.0" @@ -27424,53 +35651,51 @@ }, "p-locate": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", "requires": { "p-limit": "^2.0.0" } }, "path-exists": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==" } } }, "pluralize": { - "version": "8.0.0" + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz", + "integrity": "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==" }, "pngjs": { - "version": "3.4.0" + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-3.4.0.tgz", + "integrity": "sha512-NCrCHhWmnQklfH4MtJMRjZ2a8c80qXeMlQMv2uVp9ISJMTt562SbGd6n2oq0PaPgKm7Z6pL9E2UlLIhC+SHL3w==" }, "polished": { "version": "4.2.2", + "resolved": "https://registry.npmjs.org/polished/-/polished-4.2.2.tgz", + "integrity": "sha512-Sz2Lkdxz6F2Pgnpi9U5Ng/WdWAUZxmHrNPoVlm3aAemxoy2Qy7LGjQg4uf8qKelDAUW94F4np3iH2YPf2qefcQ==", "requires": { "@babel/runtime": "^7.17.8" } }, - "portfinder": { - "version": "1.0.28", - "requires": { - "async": "^2.6.2", - "debug": "^3.1.1", - "mkdirp": "^0.5.5" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "requires": { - "ms": "^2.1.1" - } - } - } - }, "postcss": { - "version": "8.4.13", + "version": "8.4.18", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.18.tgz", + "integrity": "sha512-Wi8mWhncLJm11GATDaQKobXSNEYGUHeQLiQqDFG1qQ5UTDPTEvKw0Xt5NsTpktGTwLps3ByrWsBrG0rB8YQ9oA==", "requires": { - "nanoid": "^3.3.3", + "nanoid": "^3.3.4", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "postcss-calc": { "version": "8.2.4", + "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.2.4.tgz", + "integrity": "sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q==", "requires": { "postcss-selector-parser": "^6.0.9", "postcss-value-parser": "^4.2.0" @@ -27478,6 +35703,8 @@ }, "postcss-colormin": { "version": "5.3.0", + "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.3.0.tgz", + "integrity": "sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg==", "requires": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0", @@ -27486,35 +35713,50 @@ } }, "postcss-convert-values": { - "version": "5.1.0", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz", + "integrity": "sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g==", "requires": { + "browserslist": "^4.20.3", "postcss-value-parser": "^4.2.0" } }, "postcss-discard-comments": 
{ - "version": "5.1.1", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz", + "integrity": "sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ==", "requires": {} }, "postcss-discard-duplicates": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz", + "integrity": "sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw==", "requires": {} }, "postcss-discard-empty": { "version": "5.1.1", + "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz", + "integrity": "sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A==", "requires": {} }, "postcss-discard-overridden": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz", + "integrity": "sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw==", "requires": {} }, "postcss-discard-unused": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-5.1.0.tgz", + "integrity": "sha512-KwLWymI9hbwXmJa0dkrzpRbSJEh0vVUd7r8t0yOGPcfKzyJJxFM8kLyC5Ev9avji6nY95pOp1W6HqIrfT+0VGw==", "requires": { "postcss-selector-parser": "^6.0.5" } }, "postcss-loader": { "version": "6.2.1", + "resolved": "https://registry.npmjs.org/postcss-loader/-/postcss-loader-6.2.1.tgz", + "integrity": "sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q==", "requires": { "cosmiconfig": "^7.0.0", "klona": "^2.0.5", @@ -27523,20 +35765,26 @@ }, "postcss-merge-idents": { "version": "5.1.1", + "resolved": "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-5.1.1.tgz", + "integrity": "sha512-pCijL1TREiCoog5nQp7wUe+TUonA2tC2sQ54UGeMmryK3UFGIYKqDyjnqd6RcuI4znFn9hWSLNN8xKE/vWcUQw==", "requires": { "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" } }, "postcss-merge-longhand": { - "version": "5.1.4", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz", + "integrity": "sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw==", "requires": { "postcss-value-parser": "^4.2.0", "stylehacks": "^5.1.0" } }, "postcss-merge-rules": { - "version": "5.1.1", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz", + "integrity": "sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ==", "requires": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0", @@ -27546,12 +35794,16 @@ }, "postcss-minify-font-values": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz", + "integrity": "sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-minify-gradients": { "version": "5.1.1", + "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz", + "integrity": "sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw==", "requires": { "colord": "^2.9.1", "cssnano-utils": "^3.1.0", @@ -27559,7 +35811,9 @@ 
} }, "postcss-minify-params": { - "version": "5.1.2", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz", + "integrity": "sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg==", "requires": { "browserslist": "^4.16.6", "cssnano-utils": "^3.1.0", @@ -27567,13 +35821,17 @@ } }, "postcss-minify-selectors": { - "version": "5.2.0", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz", + "integrity": "sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg==", "requires": { "postcss-selector-parser": "^6.0.5" } }, "postcss-modules-extract-imports": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz", + "integrity": "sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ==", "dev": true, "requires": { "postcss": "^7.0.5" @@ -27581,10 +35839,14 @@ "dependencies": { "picocolors": { "version": "0.2.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", "dev": true }, "postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, "requires": { "picocolors": "^0.2.1", @@ -27595,6 +35857,8 @@ }, "postcss-modules-local-by-default": { "version": "3.0.3", + "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-3.0.3.tgz", + "integrity": "sha512-e3xDq+LotiGesympRlKNgaJ0PCzoUIdpH0dj47iWAui/kyTgh3CiAr1qP54uodmJhl6p9rN6BoNcdEDVJx9RDw==", "dev": true, "requires": { "icss-utils": "^4.1.1", @@ -27605,10 +35869,14 @@ "dependencies": { "picocolors": { "version": "0.2.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", "dev": true }, "postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, "requires": { "picocolors": "^0.2.1", @@ -27619,6 +35887,8 @@ }, "postcss-modules-scope": { "version": "2.2.0", + "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-2.2.0.tgz", + "integrity": "sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ==", "dev": true, "requires": { "postcss": "^7.0.6", @@ -27627,10 +35897,14 @@ "dependencies": { "picocolors": { "version": "0.2.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", "dev": true }, "postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, "requires": { "picocolors": "^0.2.1", @@ -27641,6 +35915,8 @@ }, "postcss-modules-values": { "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-3.0.0.tgz", + "integrity": "sha512-1//E5jCBrZ9DmRX+zCtmQtRSV6PV42Ix7Bzj9GbwJceduuf7IqP8MgeTXuRDHOWj2m0VzZD5+roFWDuU8RQjcg==", "dev": true, "requires": { "icss-utils": "^4.0.0", @@ -27649,10 +35925,14 @@ "dependencies": { "picocolors": { "version": "0.2.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", "dev": true }, "postcss": { "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", "dev": true, "requires": { "picocolors": "^0.2.1", @@ -27663,40 +35943,54 @@ }, "postcss-normalize-charset": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz", + "integrity": "sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg==", "requires": {} }, "postcss-normalize-display-values": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz", + "integrity": "sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-positions": { - "version": "5.1.0", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz", + "integrity": "sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-repeat-style": { - "version": "5.1.0", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz", + "integrity": "sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-string": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz", + "integrity": "sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-timing-functions": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz", + "integrity": "sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-normalize-unicode": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz", + "integrity": "sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ==", "requires": { "browserslist": "^4.16.6", "postcss-value-parser": "^4.2.0" @@ -27704,6 +35998,8 @@ }, "postcss-normalize-url": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz", + "integrity": "sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew==", "requires": { "normalize-url": "^6.0.1", 
"postcss-value-parser": "^4.2.0" @@ -27711,12 +36007,16 @@ }, "postcss-normalize-whitespace": { "version": "5.1.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz", + "integrity": "sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-ordered-values": { - "version": "5.1.1", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz", + "integrity": "sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ==", "requires": { "cssnano-utils": "^3.1.0", "postcss-value-parser": "^4.2.0" @@ -27724,12 +36024,16 @@ }, "postcss-reduce-idents": { "version": "5.2.0", + "resolved": "https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-5.2.0.tgz", + "integrity": "sha512-BTrLjICoSB6gxbc58D5mdBK8OhXRDqud/zodYfdSi52qvDHdMwk+9kB9xsM8yJThH/sZU5A6QVSmMmaN001gIg==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-reduce-initial": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz", + "integrity": "sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw==", "requires": { "browserslist": "^4.16.6", "caniuse-api": "^3.0.0" @@ -27737,72 +36041,61 @@ }, "postcss-reduce-transforms": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz", + "integrity": "sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ==", "requires": { "postcss-value-parser": "^4.2.0" } }, "postcss-selector-parser": { "version": "6.0.10", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz", + "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==", "requires": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "postcss-sort-media-queries": { - "version": "4.2.1", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/postcss-sort-media-queries/-/postcss-sort-media-queries-4.3.0.tgz", + "integrity": "sha512-jAl8gJM2DvuIJiI9sL1CuiHtKM4s5aEIomkU8G3LFvbP+p8i7Sz8VV63uieTgoewGqKbi+hxBTiOKJlB35upCg==", "requires": { - "sort-css-media-queries": "2.0.4" + "sort-css-media-queries": "2.1.0" } }, "postcss-svgo": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.1.0.tgz", + "integrity": "sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA==", "requires": { "postcss-value-parser": "^4.2.0", "svgo": "^2.7.0" - }, - "dependencies": { - "commander": { - "version": "7.2.0" - }, - "css-tree": { - "version": "1.1.3", - "requires": { - "mdn-data": "2.0.14", - "source-map": "^0.6.1" - } - }, - "mdn-data": { - "version": "2.0.14" - }, - "svgo": { - "version": "2.8.0", - "requires": { - "@trysound/sax": "0.2.0", - "commander": "^7.2.0", - "css-select": "^4.1.3", - "css-tree": "^1.1.3", - "csso": "^4.2.0", - "picocolors": "^1.0.0", - "stable": "^0.1.8" - } - } } }, "postcss-unique-selectors": { "version": "5.1.1", + "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz", + "integrity": "sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA==", "requires": { 
"postcss-selector-parser": "^6.0.5" } }, "postcss-value-parser": { - "version": "4.2.0" + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" }, "postcss-zindex": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-5.1.0.tgz", + "integrity": "sha512-fgFMf0OtVSBR1va1JNHYgMxYk73yhn/qb4uQDq1DLGYolz8gHCyr/sesEuGUaYs58E3ZJRcpoGuPVoB7Meiq9A==", "requires": {} }, "prebuild-install": { - "version": "7.1.0", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz", + "integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==", "requires": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", @@ -27811,7 +36104,6 @@ "mkdirp-classic": "^0.5.3", "napi-build-utils": "^1.0.1", "node-abi": "^3.3.0", - "npmlog": "^4.0.1", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^4.0.0", @@ -27821,13 +36113,25 @@ }, "prelude-ls": { "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", + "dev": true + }, + "prepend-http": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==" + }, + "pretty-bytes": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", + "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", "dev": true }, - "prepend-http": { - "version": "2.0.0" - }, "pretty-error": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz", + "integrity": "sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==", "requires": { "lodash": "^4.17.20", "renderkid": "^3.0.0" @@ -27835,6 +36139,8 @@ }, "pretty-format": { "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", "dev": true, "requires": { "ansi-regex": "^5.0.1", @@ -27844,38 +36150,56 @@ "dependencies": { "ansi-styles": { "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", "dev": true }, "react-is": { "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", "dev": true } } }, "pretty-time": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/pretty-time/-/pretty-time-1.1.0.tgz", + "integrity": "sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA==" }, "prism-react-renderer": { - "version": "1.3.1", + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-1.3.5.tgz", + "integrity": 
"sha512-IJ+MSwBWKG+SM3b2SUfdrhC+gu01QkV2KmRQgREThBfSQRoufqRfxfHUxpG1WcaFjP+kojcFyO9Qqtpgt3qLCg==", "requires": {} }, "prismjs": { - "version": "1.28.0" + "version": "1.29.0", + "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.29.0.tgz", + "integrity": "sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==" }, "process": { - "version": "0.11.10" + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==" }, "process-nextick-args": { - "version": "2.0.1" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "promise": { "version": "7.3.1", + "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", + "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", "requires": { "asap": "~2.0.3" } }, "prompts": { "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", "requires": { "kleur": "^3.0.3", "sisteransi": "^1.0.5" @@ -27883,6 +36207,8 @@ }, "prop-types": { "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", "requires": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", @@ -27890,34 +36216,52 @@ }, "dependencies": { "react-is": { - "version": "16.13.1" + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" } } }, "property-information": { "version": "5.6.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz", + "integrity": "sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==", "requires": { "xtend": "^4.0.0" } }, "proxy-addr": { "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", "requires": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" }, "dependencies": { "ipaddr.js": { - "version": "1.9.1" + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" } } }, + "proxy-from-env": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", + "integrity": "sha512-F2JHgJQ1iqwnHDcQjVBsq3n/uoaFL+iPW/eAeL7kVxy/2RrWaN4WroKjjvbsoRtv0ftelNyC01bjRhn/bhcf4A==", + "dev": true + }, "psl": { - "version": "1.8.0", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", + "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==", "dev": true }, "public-encrypt": { "version": "4.0.3", + "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", + 
"integrity": "sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==", "requires": { "bn.js": "^4.1.0", "browserify-rsa": "^4.0.0", @@ -27928,71 +36272,100 @@ }, "dependencies": { "bn.js": { - "version": "4.12.0" + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", + "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" } } }, "pump": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", "requires": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "punycode": { - "version": "2.1.1" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" }, "pupa": { "version": "2.1.1", + "resolved": "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz", + "integrity": "sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==", "requires": { "escape-goat": "^2.0.0" } }, "pure-color": { - "version": "1.3.0" - }, - "q": { - "version": "1.5.1" + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/pure-color/-/pure-color-1.3.0.tgz", + "integrity": "sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA==" }, "qs": { - "version": "6.10.3", - "requires": { - "side-channel": "^1.0.4" - } + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", + "dev": true }, "querystring": { - "version": "0.2.1" + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.1.tgz", + "integrity": "sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==" }, "querystring-es3": { - "version": "0.2.1" + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", + "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==" + }, + "querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true }, "queue": { "version": "6.0.2", + "resolved": "https://registry.npmjs.org/queue/-/queue-6.0.2.tgz", + "integrity": "sha512-iHZWu+q3IdFZFX36ro/lKBkSvfkztY5Y7HMiPlOUjhupPcG2JMfst2KKEpu5XndviX/3UhFbRngUPNKtgvtZiA==", "requires": { "inherits": "~2.0.3" } }, "queue-microtask": { - "version": "1.2.3" + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" }, "randombytes": { "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", "requires": { "safe-buffer": "^5.1.0" } }, "randomfill": { "version": "1.0.4", + "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz", + "integrity": 
"sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==", "requires": { "randombytes": "^2.0.5", "safe-buffer": "^5.1.0" } }, "range-parser": { - "version": "1.2.0" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", + "integrity": "sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A==" }, "raw-body": { "version": "2.5.1", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", + "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", "requires": { "bytes": "3.1.2", "http-errors": "2.0.0", @@ -28001,12 +36374,16 @@ }, "dependencies": { "bytes": { - "version": "3.1.2" + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" } } }, "rc": { "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", "requires": { "deep-extend": "^0.6.0", "ini": "~1.3.0", @@ -28014,13 +36391,22 @@ "strip-json-comments": "~2.0.1" }, "dependencies": { + "ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" + }, "strip-json-comments": { - "version": "2.0.1" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==" } } }, "react": { "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz", + "integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" @@ -28028,6 +36414,8 @@ }, "react-base16-styling": { "version": "0.6.0", + "resolved": "https://registry.npmjs.org/react-base16-styling/-/react-base16-styling-0.6.0.tgz", + "integrity": "sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ==", "requires": { "base16": "^1.0.0", "lodash.curry": "^4.0.1", @@ -28037,6 +36425,8 @@ }, "react-dev-utils": { "version": "12.0.1", + "resolved": "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-12.0.1.tgz", + "integrity": "sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ==", "requires": { "@babel/code-frame": "^7.16.0", "address": "^1.1.2", @@ -28066,12 +36456,16 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { "color-convert": "^2.0.1" } }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -28079,49 +36473,69 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + 
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "escape-string-regexp": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==" }, "find-up": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "requires": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "has-flag": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "loader-utils": { - "version": "3.2.0" + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.0.tgz", + "integrity": "sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ==" }, "locate-path": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "requires": { "p-locate": "^5.0.0" } }, "p-limit": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "requires": { "yocto-queue": "^0.1.0" } }, "p-locate": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "requires": { "p-limit": "^3.0.2" } }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "requires": { "has-flag": "^4.0.0" } @@ -28130,6 +36544,8 @@ }, "react-dom": { "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz", + "integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", @@ -28137,13 +36553,19 @@ } }, "react-error-overlay": { - "version": "6.0.11" + "version": "6.0.11", + "resolved": "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.11.tgz", + "integrity": "sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg==" }, "react-fast-compare": { - "version": "3.2.0" + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-3.2.0.tgz", + "integrity": "sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA==" }, "react-helmet-async": { "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/react-helmet-async/-/react-helmet-async-1.3.0.tgz", + "integrity": "sha512-9jZ57/dAn9t3q6hneQS0wukqC2ENOBgMNVEhb/ZG9ZSxUetzVIw4iAmEU38IaVg3QGYauQPhSeUTuIUtFglWpg==", "requires": { "@babel/runtime": "^7.12.5", "invariant": "^2.2.4", @@ -28153,10 +36575,14 @@ } }, "react-is": { - "version": "18.1.0" + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==" }, "react-json-view": { "version": "1.21.3", + "resolved": "https://registry.npmjs.org/react-json-view/-/react-json-view-1.21.3.tgz", + "integrity": "sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw==", "requires": { "flux": "^4.0.1", "react-base16-styling": "^0.6.0", @@ -28165,10 +36591,14 @@ } }, "react-lifecycles-compat": { - "version": "3.0.4" + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz", + "integrity": "sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==" }, "react-loadable": { "version": "npm:@docusaurus/react-loadable@5.5.2", + "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz", + "integrity": "sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ==", "requires": { "@types/react": "*", "prop-types": "^15.6.2" @@ -28176,18 +36606,21 @@ }, "react-loadable-ssr-addon-v5-slorber": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/react-loadable-ssr-addon-v5-slorber/-/react-loadable-ssr-addon-v5-slorber-1.0.1.tgz", + "integrity": "sha512-lq3Lyw1lGku8zUEJPDxsNm1AfYHBrO9Y1+olAYwpUJ2IGFBskM0DMKok97A6LWUpHm+o7IvQBOWu9MLenp9Z+A==", "requires": { "@babel/runtime": "^7.10.3" } }, "react-router": { - "version": "5.3.1", + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.3.4.tgz", + "integrity": "sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA==", "requires": { "@babel/runtime": "^7.12.13", "history": "^4.9.0", "hoist-non-react-statics": "^3.1.0", "loose-envify": "^1.3.1", - "mini-create-react-context": "^0.4.0", "path-to-regexp": "^1.7.0", "prop-types": "^15.6.2", "react-is": "^16.6.0", @@ -28196,66 +36629,84 @@ }, "dependencies": { "react-is": { - "version": "16.13.1" + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" } } }, "react-router-config": { "version": "5.1.1", + "resolved": "https://registry.npmjs.org/react-router-config/-/react-router-config-5.1.1.tgz", + "integrity": "sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg==", "requires": { "@babel/runtime": "^7.1.2" } }, "react-router-dom": { - "version": "5.3.1", + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.3.4.tgz", + "integrity": "sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ==", "requires": { "@babel/runtime": "^7.12.13", "history": "^4.9.0", "loose-envify": "^1.3.1", "prop-types": "^15.6.2", - "react-router": "5.3.1", + "react-router": "5.3.4", "tiny-invariant": "^1.0.2", "tiny-warning": "^1.0.0" } }, "react-tabs": { "version": "3.2.3", + "resolved": 
"https://registry.npmjs.org/react-tabs/-/react-tabs-3.2.3.tgz", + "integrity": "sha512-jx325RhRVnS9DdFbeF511z0T0WEqEoMl1uCE3LoZ6VaZZm7ytatxbum0B8bCTmaiV0KsU+4TtLGTGevCic7SWg==", "requires": { "clsx": "^1.1.0", "prop-types": "^15.5.0" } }, "react-textarea-autosize": { - "version": "8.3.3", + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.3.4.tgz", + "integrity": "sha512-CdtmP8Dc19xL8/R6sWvtknD/eCXkQr30dtvC4VmGInhRsfF8X/ihXCq6+9l9qbxmKRiq407/7z5fxE7cVWQNgQ==", "requires": { "@babel/runtime": "^7.10.2", - "use-composed-ref": "^1.0.0", - "use-latest": "^1.0.0" + "use-composed-ref": "^1.3.0", + "use-latest": "^1.2.1" } }, "react-tooltip": { - "version": "4.2.21", + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/react-tooltip/-/react-tooltip-4.4.2.tgz", + "integrity": "sha512-VncdQWS6wfTZK/J1xW9PLCfKwmeNcxDGZ6mC8ZE7V3UUyNRw/ZpzcqQZm84WWKptAiBwyKL0PgCRrKJk3N440Q==", "requires": { - "prop-types": "^15.7.2", + "prop-types": "^15.8.1", "uuid": "^7.0.3" + }, + "dependencies": { + "uuid": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz", + "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==" + } } }, "react-waypoint": { - "version": "10.1.0", + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/react-waypoint/-/react-waypoint-10.3.0.tgz", + "integrity": "sha512-iF1y2c1BsoXuEGz08NoahaLFIGI9gTUAAOKip96HUmylRT6DUtpgoBPjk/Y8dfcFVmfVDvUzWjNXpZyKTOV0SQ==", "requires": { "@babel/runtime": "^7.12.5", "consolidated-events": "^1.1.0 || ^2.0.0", "prop-types": "^15.0.0", - "react-is": "^17.0.1" - }, - "dependencies": { - "react-is": { - "version": "17.0.2" - } + "react-is": "^17.0.1 || ^18.0.0" } }, "readable-stream": { "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "requires": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -28264,27 +36715,37 @@ }, "readdirp": { "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "requires": { "picomatch": "^2.2.1" } }, "reading-time": { - "version": "1.5.0" + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/reading-time/-/reading-time-1.5.0.tgz", + "integrity": "sha512-onYyVhBNr4CmAxFsKS7bz+uTLRakypIe4R+5A824vBSkQy/hB3fZepoVEf8OVAxzLvK+H/jm9TzpI3ETSm64Kg==" }, "rechoir": { "version": "0.6.2", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", "requires": { "resolve": "^1.1.6" } }, "recursive-readdir": { "version": "2.2.2", + "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz", + "integrity": "sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==", "requires": { "minimatch": "3.0.4" }, "dependencies": { "minimatch": { "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "requires": { "brace-expansion": "^1.1.7" } @@ -28293,6 +36754,8 @@ }, "redent": { "version": "3.0.0", + 
"resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", "dev": true, "requires": { "indent-string": "^4.0.0", @@ -28300,10 +36763,11 @@ } }, "redoc": { - "version": "2.0.0-rc.67", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/redoc/-/redoc-2.0.0.tgz", + "integrity": "sha512-rU8iLdAkT89ywOkYk66Mr+IofqaMASlRvTew0dJvopCORMIPUcPMxjlJbJNC6wsn2vvMnpUFLQ/0ISDWn9BWag==", "requires": { - "@redocly/openapi-core": "^1.0.0-beta.88", - "@redocly/react-dropdown-aria": "^2.0.11", + "@redocly/openapi-core": "^1.0.0-beta.104", "classnames": "^2.3.1", "decko": "^1.2.0", "dompurify": "^2.2.8", @@ -28311,11 +36775,11 @@ "json-pointer": "^0.6.2", "lunr": "^2.3.9", "mark.js": "^8.11.1", - "marked": "^4.0.10", + "marked": "^4.0.15", "mobx-react": "^7.2.0", - "openapi-sampler": "^1.2.1", + "openapi-sampler": "^1.3.0", "path-browserify": "^1.0.1", - "perfect-scrollbar": "^1.5.1", + "perfect-scrollbar": "^1.5.5", "polished": "^4.1.3", "prismjs": "^1.27.0", "prop-types": "^15.7.2", @@ -28328,82 +36792,156 @@ }, "dependencies": { "slugify": { - "version": "1.4.7" + "version": "1.4.7", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.4.7.tgz", + "integrity": "sha512-tf+h5W1IrjNm/9rKKj0JU2MDMruiopx0jjVA5zCdBtcGjfp0+c5rHw/zADLC3IeKlGHtVbHtpfzvYA0OYT+HKg==" }, "style-loader": { "version": "3.3.1", + "resolved": "https://registry.npmjs.org/style-loader/-/style-loader-3.3.1.tgz", + "integrity": "sha512-GPcQ+LDJbrcxHORTRes6Jy2sfvK2kS6hpSfI/fXhPt+spVzxF6LJ1dHLN9zIGmVaaP044YKaIatFaufENRiDoQ==", "requires": {} } } }, "reftools": { - "version": "1.1.9" + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/reftools/-/reftools-1.1.9.tgz", + "integrity": "sha512-OVede/NQE13xBQ+ob5CKd5KyeJYU2YInb1bmV4nRoOfquZPkAkxuOXicSe1PvqIuZZ4kD13sPKBbR7UFDmli6w==" }, "regenerate": { - "version": "1.4.2" + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", + "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==" }, "regenerate-unicode-properties": { - "version": "10.0.1", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz", + "integrity": "sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ==", "requires": { "regenerate": "^1.4.2" } }, "regenerator-runtime": { - "version": "0.13.9" + "version": "0.13.10", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.10.tgz", + "integrity": "sha512-KepLsg4dU12hryUO7bp/axHAKvwGOCV0sGloQtpagJ12ai+ojVDqkeGSiRX1zlq+kjIMZ1t7gpze+26QqtdGqw==" }, "regenerator-transform": { "version": "0.15.0", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.0.tgz", + "integrity": "sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg==", "requires": { "@babel/runtime": "^7.8.4" } }, + "regexp.prototype.flags": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", + "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "functions-have-names": "^1.2.2" + } + }, "regexpu-core": { - "version": "5.0.1", + "version": 
"5.2.1", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.2.1.tgz", + "integrity": "sha512-HrnlNtpvqP1Xkb28tMhBUO2EbyUHdQlsnlAhzWcwHy8WJR53UWr7/MAvqrsQKMbV4qdpv03oTMG8iIhfsPFktQ==", "requires": { "regenerate": "^1.4.2", - "regenerate-unicode-properties": "^10.0.1", - "regjsgen": "^0.6.0", - "regjsparser": "^0.8.2", + "regenerate-unicode-properties": "^10.1.0", + "regjsgen": "^0.7.1", + "regjsparser": "^0.9.1", "unicode-match-property-ecmascript": "^2.0.0", "unicode-match-property-value-ecmascript": "^2.0.0" } }, "registry-auth-token": { - "version": "4.2.1", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.2.tgz", + "integrity": "sha512-PC5ZysNb42zpFME6D/XlIgtNGdTl8bBOCw90xQLVMpzuuubJKYDWFAEuUNc+Cn8Z8724tg2SDhDRrkVEsqfDMg==", "requires": { - "rc": "^1.2.8" + "rc": "1.2.8" } }, "registry-url": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz", + "integrity": "sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==", "requires": { "rc": "^1.2.8" } }, "regjsgen": { - "version": "0.6.0" + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.7.1.tgz", + "integrity": "sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA==" }, "regjsparser": { - "version": "0.8.4", + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz", + "integrity": "sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==", "requires": { "jsesc": "~0.5.0" }, "dependencies": { "jsesc": { - "version": "0.5.0" + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==" } } }, + "rehype-katex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/rehype-katex/-/rehype-katex-5.0.0.tgz", + "integrity": "sha512-ksSuEKCql/IiIadOHiKRMjypva9BLhuwQNascMqaoGLDVd0k2NlE2wMvgZ3rpItzRKCd6vs8s7MFbb8pcR0AEg==", + "requires": { + "@types/katex": "^0.11.0", + "hast-util-to-text": "^2.0.0", + "katex": "^0.13.0", + "rehype-parse": "^7.0.0", + "unified": "^9.0.0", + "unist-util-visit": "^2.0.0" + } + }, "rehype-parse": { - "version": "6.0.2", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-7.0.1.tgz", + "integrity": "sha512-fOiR9a9xH+Le19i4fGzIEowAbwG7idy2Jzs4mOrFWBSJ0sNUgy0ev871dwWnbOo371SjgjG4pwzrbgSVrKxecw==", "requires": { - "hast-util-from-parse5": "^5.0.0", - "parse5": "^5.0.0", - "xtend": "^4.0.0" + "hast-util-from-parse5": "^6.0.0", + "parse5": "^6.0.0" + }, + "dependencies": { + "parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" + } + } + }, + "relateurl": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", + "integrity": "sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==" + }, + "remark-admonitions": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/remark-admonitions/-/remark-admonitions-1.2.1.tgz", + "integrity": "sha512-Ji6p68VDvD+H1oS95Fdx9Ar5WA2wcDA4kwrrhVU7fGctC6+d3uiMICu7w7/2Xld+lnU7/gi+432+rRbup5S8ow==", + "requires": { + "rehype-parse": 
"^6.0.2", + "unified": "^8.4.2", + "unist-util-visit": "^2.0.1" }, "dependencies": { "hast-util-from-parse5": { "version": "5.0.3", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-5.0.3.tgz", + "integrity": "sha512-gOc8UB99F6eWVWFtM9jUikjN7QkWxB3nY0df5Z0Zq1/Nkwl5V4hAAsl0tmwlgWl/1shlTF8DnNYLO8X6wRV9pA==", "requires": { "ccount": "^1.0.3", "hastscript": "^5.0.0", @@ -28414,6 +36952,8 @@ }, "hastscript": { "version": "5.1.2", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-5.1.2.tgz", + "integrity": "sha512-WlztFuK+Lrvi3EggsqOkQ52rKbxkXL3RwB6t5lwoa8QLMemoWfBuL43eDrwOamJyR7uKQKdmKYaBH1NZBiIRrQ==", "requires": { "comma-separated-tokens": "^1.0.0", "hast-util-parse-selector": "^2.0.0", @@ -28422,23 +36962,24 @@ } }, "parse5": { - "version": "5.1.1" - } - } - }, - "relateurl": { - "version": "0.2.7" - }, - "remark-admonitions": { - "version": "1.2.1", - "requires": { - "rehype-parse": "^6.0.2", - "unified": "^8.4.2", - "unist-util-visit": "^2.0.1" - }, - "dependencies": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", + "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==" + }, + "rehype-parse": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-6.0.2.tgz", + "integrity": "sha512-0S3CpvpTAgGmnz8kiCyFLGuW5yA4OQhyNTm/nwPopZ7+PI11WnGl1TTWTGv/2hPEe/g2jRLlhVVSsoDH8waRug==", + "requires": { + "hast-util-from-parse5": "^5.0.0", + "parse5": "^5.0.0", + "xtend": "^4.0.0" + } + }, "unified": { "version": "8.4.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-8.4.2.tgz", + "integrity": "sha512-JCrmN13jI4+h9UAyKEoGcDZV+i1E7BLFuG7OsaDvTXI5P0qhHX+vZO/kOhz9jn8HGENDKbwSeB0nVOg4gVStGA==", "requires": { "bail": "^1.0.0", "extend": "^3.0.0", @@ -28451,6 +36992,8 @@ }, "remark-emoji": { "version": "2.2.0", + "resolved": "https://registry.npmjs.org/remark-emoji/-/remark-emoji-2.2.0.tgz", + "integrity": "sha512-P3cj9s5ggsUvWw5fS2uzCHJMGuXYRb0NnZqYlNecewXt8QBU9n5vW3DUUKOhepS8F9CwdMx9B8a3i7pqFWAI5w==", "requires": { "emoticon": "^3.2.0", "node-emoji": "^1.10.0", @@ -28458,10 +37001,19 @@ } }, "remark-footnotes": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/remark-footnotes/-/remark-footnotes-2.0.0.tgz", + "integrity": "sha512-3Clt8ZMH75Ayjp9q4CorNeyjwIxHFcTkaektplKGl2A1jNGEUey8cKL0ZC5vJwfcD5GFGsNLImLG/NGzWIzoMQ==" + }, + "remark-math": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/remark-math/-/remark-math-3.0.1.tgz", + "integrity": "sha512-epT77R/HK0x7NqrWHdSV75uNLwn8g9qTyMqCRCDujL0vj/6T6+yhdrR7mjELWtkse+Fw02kijAaBuVcHBor1+Q==" }, "remark-mdx": { "version": "1.6.22", + "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-1.6.22.tgz", + "integrity": "sha512-phMHBJgeV76uyFkH4rvzCftLfKCr2RZuF+/gmVcaKrpsihyzmhXjA0BEMDaPTXG5y8qZOKPVo83NAOX01LPnOQ==", "requires": { "@babel/core": "7.12.9", "@babel/helper-plugin-utils": "7.10.4", @@ -28475,6 +37027,8 @@ "dependencies": { "@babel/core": { "version": "7.12.9", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz", + "integrity": "sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==", "requires": { "@babel/code-frame": "^7.10.4", "@babel/generator": "^7.12.5", @@ -28495,10 +37049,14 @@ } }, "@babel/helper-plugin-utils": { - "version": "7.10.4" + "version": "7.10.4", + "resolved": 
"https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", + "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" }, "@babel/plugin-proposal-object-rest-spread": { "version": "7.12.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.1.tgz", + "integrity": "sha512-s6SowJIjzlhx8o7lsFx5zmY4At6CTtDvgNQDdPzkBQucle58A6b/TTeEBYtyDgmcXjUTM+vE8YOGHZzzbc/ioA==", "requires": { "@babel/helper-plugin-utils": "^7.10.4", "@babel/plugin-syntax-object-rest-spread": "^7.8.0", @@ -28507,20 +37065,28 @@ }, "@babel/plugin-syntax-jsx": { "version": "7.12.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz", + "integrity": "sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg==", "requires": { "@babel/helper-plugin-utils": "^7.10.4" } }, "semver": { - "version": "5.7.1" + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" }, "source-map": { - "version": "0.5.7" + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==" } } }, "remark-parse": { "version": "8.0.3", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-8.0.3.tgz", + "integrity": "sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q==", "requires": { "ccount": "^1.0.0", "collapse-white-space": "^1.0.2", @@ -28542,45 +37108,130 @@ }, "remark-squeeze-paragraphs": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/remark-squeeze-paragraphs/-/remark-squeeze-paragraphs-4.0.0.tgz", + "integrity": "sha512-8qRqmL9F4nuLPIgl92XUuxI3pFxize+F1H0e/W3llTk0UsjJaj01+RrirkMw7P21RKe4X6goQhYRSvNWX+70Rw==", "requires": { "mdast-squeeze-paragraphs": "^4.0.0" } }, "renderkid": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz", + "integrity": "sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==", "requires": { "css-select": "^4.1.3", "dom-converter": "^0.2.0", "htmlparser2": "^6.1.0", "lodash": "^4.17.21", "strip-ansi": "^6.0.1" + }, + "dependencies": { + "css-select": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz", + "integrity": "sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==", + "requires": { + "boolbase": "^1.0.0", + "css-what": "^6.0.1", + "domhandler": "^4.3.1", + "domutils": "^2.8.0", + "nth-check": "^2.0.1" + } + }, + "dom-serializer": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", + "integrity": "sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==", + "requires": { + "domelementtype": "^2.0.1", + "domhandler": "^4.2.0", + "entities": "^2.0.0" + } + }, + "domhandler": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz", + "integrity": "sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==", + "requires": { + "domelementtype": "^2.2.0" + } + }, + "domutils": 
{ + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", + "integrity": "sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", + "requires": { + "dom-serializer": "^1.0.1", + "domelementtype": "^2.2.0", + "domhandler": "^4.2.0" + } + }, + "entities": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", + "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==" + }, + "htmlparser2": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz", + "integrity": "sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==", + "requires": { + "domelementtype": "^2.0.1", + "domhandler": "^4.0.0", + "domutils": "^2.5.2", + "entities": "^2.0.0" + } + } } }, "repeat-string": { - "version": "1.6.1" + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==" + }, + "request-progress": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", + "integrity": "sha512-MnWzEHHaxHO2iWiQuHrUPBi/1WeBf5PkxQqNyNvLl9VAYSdXkP8tQ3pBSeCPD+yw0v0Aq1zosWLz0BdeXpWwZg==", + "dev": true, + "requires": { + "throttleit": "^1.0.0" + } }, "require-directory": { - "version": "2.1.1" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==" }, "require-from-string": { - "version": "2.0.2" + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" }, "require-like": { - "version": "0.1.2" + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", + "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==" }, "requires-port": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" }, "resolve": { - "version": "1.22.0", + "version": "1.22.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", "requires": { - "is-core-module": "^2.8.1", + "is-core-module": "^2.9.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" } }, "resolve-cwd": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", "dev": true, "requires": { "resolve-from": "^5.0.0" @@ -28588,50 +37239,88 @@ "dependencies": { "resolve-from": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true } } }, 
"resolve-from": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" }, "resolve-pathname": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz", + "integrity": "sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==" }, "resolve.exports": { "version": "1.1.0", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-1.1.0.tgz", + "integrity": "sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==", "dev": true }, "responselike": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", + "integrity": "sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ==", "requires": { "lowercase-keys": "^1.0.0" } }, + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "requires": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + } + }, "retry": { - "version": "0.13.1" + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==" }, "reusify": { - "version": "1.0.4" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" + }, + "rfdc": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", + "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==", + "dev": true }, "rimraf": { "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "requires": { "glob": "^7.1.3" } }, "ripemd160": { "version": "2.0.2", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", + "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", "requires": { "hash-base": "^3.0.0", "inherits": "^2.0.1" } }, "rtl-detect": { - "version": "1.0.4" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/rtl-detect/-/rtl-detect-1.0.4.tgz", + "integrity": "sha512-EBR4I2VDSSYr7PkBmFy04uhycIpDKp+21p/jARYXlCSjQksTBQcJ0HFUPOO79EPPH5JS6VAhiIQbycf0O3JAxQ==" }, "rtlcss": { "version": "3.5.0", + "resolved": "https://registry.npmjs.org/rtlcss/-/rtlcss-3.5.0.tgz", + "integrity": "sha512-wzgMaMFHQTnyi9YOwsx9LjOxYXJPzS8sYnFaKm6R5ysvTkwzHiB0vxnbHwchHQT65PTdBjDG21/kQBWI7q9O7A==", "requires": { "find-up": "^5.0.0", "picocolors": "^1.0.0", @@ -28641,6 +37330,8 @@ "dependencies": { "find-up": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "requires": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" @@ -28648,18 +37339,24 @@ }, "locate-path": 
{ "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "requires": { "p-locate": "^5.0.0" } }, "p-limit": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "requires": { "yocto-queue": "^0.1.0" } }, "p-locate": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "requires": { "p-limit": "^3.0.2" } @@ -28668,27 +37365,49 @@ }, "run-parallel": { "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", "requires": { "queue-microtask": "^1.2.2" } }, "rxjs": { - "version": "7.5.5", + "version": "7.5.7", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.5.7.tgz", + "integrity": "sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA==", "requires": { "tslib": "^2.1.0" } }, "safe-buffer": { - "version": "5.1.2" + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, + "safe-regex-test": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", + "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "is-regex": "^1.1.4" + } }, "safer-buffer": { - "version": "2.1.2" + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sax": { - "version": "1.2.4" + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" }, "saxes": { "version": "5.0.1", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz", + "integrity": "sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==", "dev": true, "requires": { "xmlchars": "^2.2.0" @@ -28696,6 +37415,8 @@ }, "scheduler": { "version": "0.20.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz", + "integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" @@ -28703,6 +37424,8 @@ }, "schema-utils": { "version": "2.7.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz", + "integrity": "sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==", "requires": { "@types/json-schema": "^7.0.5", "ajv": "^6.12.4", @@ -28711,39 +37434,53 @@ }, "section-matter": { "version": "1.0.0", + "resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", + "integrity": 
"sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", "requires": { "extend-shallow": "^2.0.1", "kind-of": "^6.0.0" } }, "select-hose": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", + "integrity": "sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==" }, "selfsigned": { - "version": "2.0.1", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-2.1.1.tgz", + "integrity": "sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ==", "requires": { "node-forge": "^1" } }, "semver": { - "version": "7.3.7", + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", "requires": { "lru-cache": "^6.0.0" } }, "semver-diff": { "version": "3.1.1", + "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", + "integrity": "sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==", "requires": { "semver": "^6.3.0" }, "dependencies": { "semver": { - "version": "6.3.0" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "send": { "version": "0.18.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", + "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", "requires": { "debug": "2.6.9", "depd": "2.0.0", @@ -28762,31 +37499,43 @@ "dependencies": { "debug": { "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" }, "dependencies": { "ms": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" } } }, "ms": { - "version": "2.1.3" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "range-parser": { - "version": "1.2.1" + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" } } }, "serialize-javascript": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", "requires": { "randombytes": "^2.1.0" } }, "serve-handler": { "version": "6.1.3", + "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz", + "integrity": "sha512-FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w==", "requires": { "bytes": "3.0.0", "content-disposition": "0.5.2", @@ -28799,27 +37548,37 @@ }, "dependencies": { "mime-db": { - "version": "1.33.0" + "version": "1.33.0", + "resolved": 
"https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz", + "integrity": "sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==" }, "mime-types": { "version": "2.1.18", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", + "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", "requires": { "mime-db": "~1.33.0" } }, "minimatch": { "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "requires": { "brace-expansion": "^1.1.7" } }, "path-to-regexp": { - "version": "2.2.1" + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz", + "integrity": "sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ==" } } }, "serve-index": { "version": "1.9.1", + "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", + "integrity": "sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==", "requires": { "accepts": "~1.3.4", "batch": "0.6.1", @@ -28832,15 +37591,21 @@ "dependencies": { "debug": { "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "requires": { "ms": "2.0.0" } }, "depd": { - "version": "1.1.2" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==" }, "http-errors": { "version": "1.6.3", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "integrity": "sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==", "requires": { "depd": "~1.1.2", "inherits": "2.0.3", @@ -28849,21 +37614,31 @@ } }, "inherits": { - "version": "2.0.3" + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" }, "ms": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "setprototypeof": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" }, "statuses": { - "version": "1.5.0" + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==" } } }, "serve-static": { "version": "1.15.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", + "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", "requires": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", @@ -28871,17 +37646,20 @@ "send": "0.18.0" } }, - "set-blocking": { - "version": "2.0.0" - }, "setimmediate": { - "version": "1.0.5" + "version": "1.0.5", 
+ "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==" }, "setprototypeof": { - "version": "1.2.0" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, "sha.js": { "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", "requires": { "inherits": "^2.0.1", "safe-buffer": "^5.0.1" @@ -28889,20 +37667,26 @@ }, "shallow-clone": { "version": "3.0.1", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", "requires": { "kind-of": "^6.0.2" } }, "shallowequal": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz", + "integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==" }, "sharp": { - "version": "0.30.4", + "version": "0.30.7", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.30.7.tgz", + "integrity": "sha512-G+MY2YW33jgflKPTXXptVO28HvNOo9G3j0MybYAHeEmby+QuD2U98dT6ueht9cv/XDqZspSpIhoSW+BAKJ7Hig==", "requires": { "color": "^4.2.3", "detect-libc": "^2.0.1", - "node-addon-api": "^4.3.0", - "prebuild-install": "^7.0.1", + "node-addon-api": "^5.0.0", + "prebuild-install": "^7.1.1", "semver": "^7.3.7", "simple-get": "^4.0.1", "tar-fs": "^2.1.1", @@ -28911,6 +37695,8 @@ "dependencies": { "color": { "version": "4.2.3", + "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", + "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", "requires": { "color-convert": "^2.0.1", "color-string": "^1.9.0" @@ -28918,29 +37704,41 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" } } }, "shebang-command": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "requires": { "shebang-regex": "^3.0.0" } }, "shebang-regex": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" }, "shell-quote": { - "version": "1.7.3" + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.4.tgz", + "integrity": "sha512-8o/QEhSSRb1a5i7TFR0iM4G16Z0vYB2OQVs4G3aAFXjn3T6yEx8AZxy1PgDF7I00LZHYA3WxaSYIf5e5sAX8Rw==" }, "shelljs": { "version": "0.8.5", + "resolved": 
"https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", + "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", "requires": { "glob": "^7.0.0", "interpret": "^1.0.0", @@ -28949,6 +37747,8 @@ }, "should": { "version": "13.2.3", + "resolved": "https://registry.npmjs.org/should/-/should-13.2.3.tgz", + "integrity": "sha512-ggLesLtu2xp+ZxI+ysJTmNjh2U0TsC+rQ/pfED9bUZZ4DKefP27D+7YJVVTvKsmjLpIi9jAa7itwDGkDDmt1GQ==", "requires": { "should-equal": "^2.0.0", "should-format": "^3.0.3", @@ -28959,32 +37759,44 @@ }, "should-equal": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/should-equal/-/should-equal-2.0.0.tgz", + "integrity": "sha512-ZP36TMrK9euEuWQYBig9W55WPC7uo37qzAEmbjHz4gfyuXrEUgF8cUvQVO+w+d3OMfPvSRQJ22lSm8MQJ43LTA==", "requires": { "should-type": "^1.4.0" } }, "should-format": { "version": "3.0.3", + "resolved": "https://registry.npmjs.org/should-format/-/should-format-3.0.3.tgz", + "integrity": "sha512-hZ58adtulAk0gKtua7QxevgUaXTTXxIi8t41L3zo9AHvjXO1/7sdLECuHeIN2SRtYXpNkmhoUP2pdeWgricQ+Q==", "requires": { "should-type": "^1.3.0", "should-type-adaptors": "^1.0.1" } }, "should-type": { - "version": "1.4.0" + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/should-type/-/should-type-1.4.0.tgz", + "integrity": "sha512-MdAsTu3n25yDbIe1NeN69G4n6mUnJGtSJHygX3+oN0ZbO3DTiATnf7XnYJdGT42JCXurTb1JI0qOBR65shvhPQ==" }, "should-type-adaptors": { "version": "1.1.0", + "resolved": "https://registry.npmjs.org/should-type-adaptors/-/should-type-adaptors-1.1.0.tgz", + "integrity": "sha512-JA4hdoLnN+kebEp2Vs8eBe9g7uy0zbRo+RMcU0EsNy+R+k049Ki+N5tT5Jagst2g7EAja+euFuoXFCa8vIklfA==", "requires": { "should-type": "^1.3.0", "should-util": "^1.0.0" } }, "should-util": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/should-util/-/should-util-1.0.1.tgz", + "integrity": "sha512-oXF8tfxx5cDk8r2kYqlkUJzZpDBqVY/II2WhvU0n9Y3XYvAYRmeaf1PvvIvTgPnv4KJ+ES5M0PyDq5Jp+Ygy2g==" }, "side-channel": { "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", "requires": { "call-bind": "^1.0.0", "get-intrinsic": "^1.0.2", @@ -28992,13 +37804,19 @@ } }, "signal-exit": { - "version": "3.0.7" + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" }, "simple-concat": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==" }, "simple-get": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", "requires": { "decompress-response": "^6.0.0", "once": "^1.3.1", @@ -29007,21 +37825,29 @@ }, "simple-html-tokenizer": { "version": "0.1.1", + "resolved": "https://registry.npmjs.org/simple-html-tokenizer/-/simple-html-tokenizer-0.1.1.tgz", + "integrity": "sha512-Mc/gH3RvlKvB/gkp9XwgDKEWrSYyefIJPGG8Jk1suZms/rISdUuVEMx5O1WBnTWaScvxXDvGJrZQWblUmQHjkQ==", "dev": true }, "simple-swizzle": { "version": "0.2.2", + "resolved": 
"https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", "requires": { "is-arrayish": "^0.3.1" }, "dependencies": { "is-arrayish": { - "version": "0.3.2" + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" } } }, "sirv": { "version": "1.0.19", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-1.0.19.tgz", + "integrity": "sha512-JuLThK3TnZG1TAKDwNIqNq6QA2afLOCcm+iE8D1Kj3GA40pSPsxQjjJl0J8X3tsR7T+CP1GavpzLwYkgVLWrZQ==", "requires": { "@polka/url": "^1.0.0-next.20", "mrmime": "^1.0.0", @@ -29029,68 +37855,118 @@ } }, "sisteransi": { - "version": "1.0.5" + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==" }, "sitemap": { "version": "7.1.1", + "resolved": "https://registry.npmjs.org/sitemap/-/sitemap-7.1.1.tgz", + "integrity": "sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg==", "requires": { "@types/node": "^17.0.5", "@types/sax": "^1.2.1", "arg": "^5.0.0", "sax": "^1.2.4" + }, + "dependencies": { + "@types/node": { + "version": "17.0.45", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", + "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==" + } } }, "slash": { - "version": "3.0.0" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==" + }, + "slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + } + } }, "slugify": { - "version": "1.6.5" + "version": "1.6.5", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.5.tgz", + "integrity": "sha512-8mo9bslnBO3tr5PEVFzMPIWwWnipGS0xVbYf65zxDqfNwmzYn1LpiKNrR6DlClusuvo+hDHd1zKpmfAe83NQSQ==" }, "sockjs": { "version": "0.3.24", + "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz", + "integrity": 
"sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==", "requires": { "faye-websocket": "^0.11.3", "uuid": "^8.3.2", "websocket-driver": "^0.7.4" - }, - "dependencies": { - "uuid": { - "version": "8.3.2" - } } }, "sort-css-media-queries": { - "version": "2.0.4" - }, - "source-list-map": { - "version": "2.0.1" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/sort-css-media-queries/-/sort-css-media-queries-2.1.0.tgz", + "integrity": "sha512-IeWvo8NkNiY2vVYdPa27MCQiR0MN0M80johAYFVxWWXQ44KU84WNxjslwBHmc/7ZL2ccwkM7/e6S5aiKZXm7jA==" }, "source-map": { - "version": "0.6.1" + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "source-map-js": { - "version": "1.0.2" - }, - "source-map-resolve": { - "version": "0.6.0", - "dev": true, - "requires": { - "atob": "^2.1.2", - "decode-uri-component": "^0.2.0" - } + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==" }, "source-map-support": { "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", "requires": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "space-separated-tokens": { - "version": "1.1.5" + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz", + "integrity": "sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==" }, "spdy": { "version": "4.0.2", + "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz", + "integrity": "sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==", "requires": { "debug": "^4.1.0", "handle-thing": "^2.0.0", @@ -29101,6 +37977,8 @@ }, "spdy-transport": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz", + "integrity": "sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==", "requires": { "debug": "^4.1.0", "detect-node": "^2.0.4", @@ -29111,13 +37989,41 @@ } }, "sprintf-js": { - "version": "1.0.3" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" + }, + "sshpk": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", + "dev": true, + "requires": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + } + }, + "ssr-window": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/ssr-window/-/ssr-window-4.0.2.tgz", + "integrity": "sha512-ISv/Ch+ig7SOtw7G2+qkwfVASzazUnvlDTwypdLoPoySv+6MqlOV10VwPSE6EWkGjhW50lUmghPmpYZXMu/+AQ==" }, "stable": { - "version": "0.1.8" + "version": "0.1.8", + "resolved": 
"https://registry.npmjs.org/stable/-/stable-0.1.8.tgz", + "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==" }, "stack-utils": { "version": "2.0.5", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", + "integrity": "sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==", "dev": true, "requires": { "escape-string-regexp": "^2.0.0" @@ -29125,24 +38031,36 @@ "dependencies": { "escape-string-regexp": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", "dev": true } } }, "state-toggle": { - "version": "1.0.3" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.3.tgz", + "integrity": "sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ==" }, "statuses": { - "version": "2.0.1" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==" }, "std-env": { - "version": "3.1.1" + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.3.0.tgz", + "integrity": "sha512-cNNS+VYsXIs5gI6gJipO4qZ8YYT274JHvNnQ1/R/x8Q8mdP0qj0zoMchRXmBNPqp/0eOEhX+3g7g6Fgb7meLIQ==" }, "stickyfill": { - "version": "1.1.1" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/stickyfill/-/stickyfill-1.1.1.tgz", + "integrity": "sha512-GCp7vHAfpao+Qh/3Flh9DXEJ/qSi0KJwJw6zYlZOtRYXWUIpMM6mC2rIep/dK8RQqwW0KxGJIllmjPIBOGN8AA==" }, "stream-browserify": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-3.0.0.tgz", + "integrity": "sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA==", "requires": { "inherits": "~2.0.4", "readable-stream": "^3.5.0" @@ -29150,6 +38068,8 @@ }, "stream-http": { "version": "3.2.0", + "resolved": "https://registry.npmjs.org/stream-http/-/stream-http-3.2.0.tgz", + "integrity": "sha512-Oq1bLqisTyK3TSCXpPbT4sdeYNdmyZJv1LxpEm2vu1ZhK89kSE5YXwZc3cWk0MagGaKriBh9mCFbVGtO+vY29A==", "requires": { "builtin-status-codes": "^3.0.0", "inherits": "^2.0.4", @@ -29158,18 +38078,17 @@ } }, "string_decoder": { - "version": "1.3.0", - "requires": { - "safe-buffer": "~5.2.0" - }, - "dependencies": { - "safe-buffer": { - "version": "5.2.1" - } + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" } }, "string-length": { "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", "dev": true, "requires": { "char-regex": "^1.0.2", @@ -29178,6 +38097,8 @@ }, "string-width": { "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", "requires": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", @@ -29185,10 +38106,14 @@ }, "dependencies": { 
"ansi-regex": { - "version": "6.0.1" + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==" }, "strip-ansi": { "version": "7.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", + "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", "requires": { "ansi-regex": "^6.0.1" } @@ -29196,21 +38121,29 @@ } }, "string.prototype.trimend": { - "version": "1.0.4", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", + "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3" + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" } }, "string.prototype.trimstart": { - "version": "1.0.4", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", + "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3" + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" } }, "stringify-object": { "version": "3.3.0", + "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", + "integrity": "sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==", "requires": { "get-own-enumerable-property-symbols": "^3.0.0", "is-obj": "^1.0.1", @@ -29219,32 +38152,46 @@ }, "strip-ansi": { "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { "ansi-regex": "^5.0.1" } }, "strip-bom": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", "dev": true }, "strip-bom-string": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", + "integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==" }, "strip-final-newline": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==" }, "strip-indent": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", "dev": true, "requires": { "min-indent": "^1.0.0" } }, "strip-json-comments": { - "version": "3.1.1" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==" }, "style-loader": { "version": "1.3.0", + "resolved": "https://registry.npmjs.org/style-loader/-/style-loader-1.3.0.tgz", + "integrity": 
"sha512-V7TCORko8rs9rIqkSrlMfkqA63DfoGBBJmK1kKGCcSi+BWb4cqz0SRsnp4l6rU5iwOEd0/2ePv68SV22VXon4Q==", "dev": true, "requires": { "loader-utils": "^2.0.0", @@ -29253,12 +38200,16 @@ }, "style-to-object": { "version": "0.3.0", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz", + "integrity": "sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==", "requires": { "inline-style-parser": "0.1.1" } }, "styled-components": { "version": "5.3.3", + "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.3.tgz", + "integrity": "sha512-++4iHwBM7ZN+x6DtPPWkCI4vdtwumQ+inA/DdAsqYd4SVgUKJie5vXyzotA00ttcFdQkCng7zc6grwlfIfw+lw==", "requires": { "@babel/helper-module-imports": "^7.0.0", "@babel/traverse": "^7.4.5", @@ -29274,6 +38225,8 @@ }, "stylehacks": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-5.1.0.tgz", + "integrity": "sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q==", "requires": { "browserslist": "^4.16.6", "postcss-selector-parser": "^6.0.4" @@ -29281,12 +38234,16 @@ }, "supports-color": { "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "requires": { "has-flag": "^3.0.0" } }, "supports-hyperlinks": { - "version": "2.2.0", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz", + "integrity": "sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==", "dev": true, "requires": { "has-flag": "^4.0.0", @@ -29295,10 +38252,14 @@ "dependencies": { "has-flag": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" @@ -29307,10 +38268,14 @@ } }, "supports-preserve-symlinks-flag": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" }, "svg-inline-loader": { "version": "0.8.2", + "resolved": "https://registry.npmjs.org/svg-inline-loader/-/svg-inline-loader-0.8.2.tgz", + "integrity": "sha512-kbrcEh5n5JkypaSC152eGfGcnT4lkR0eSfvefaUJkLqgGjRQJyKDvvEE/CCv5aTSdfXuc+N98w16iAojhShI3g==", "dev": true, "requires": { "loader-utils": "^1.1.0", @@ -29320,6 +38285,8 @@ "dependencies": { "json5": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", "dev": true, "requires": { "minimist": "^1.2.0" @@ -29327,6 +38294,8 @@ }, "loader-utils": { "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.0.tgz", + "integrity": 
"sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==", "dev": true, "requires": { "big.js": "^5.2.2", @@ -29337,80 +38306,80 @@ } }, "svg-parser": { - "version": "2.0.4" + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz", + "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==" }, "svgo": { - "version": "1.3.2", + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-2.8.0.tgz", + "integrity": "sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg==", "requires": { - "chalk": "^2.4.1", - "coa": "^2.0.2", - "css-select": "^2.0.0", - "css-select-base-adapter": "^0.1.1", - "css-tree": "1.0.0-alpha.37", - "csso": "^4.0.2", - "js-yaml": "^3.13.1", - "mkdirp": "~0.5.1", - "object.values": "^1.1.0", - "sax": "~1.2.4", - "stable": "^0.1.8", - "unquote": "~1.1.1", - "util.promisify": "~1.0.0" + "@trysound/sax": "0.2.0", + "commander": "^7.2.0", + "css-select": "^4.1.3", + "css-tree": "^1.1.3", + "csso": "^4.2.0", + "picocolors": "^1.0.0", + "stable": "^0.1.8" }, "dependencies": { - "argparse": { - "version": "1.0.10", - "requires": { - "sprintf-js": "~1.0.2" - } + "commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==" }, "css-select": { - "version": "2.1.0", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz", + "integrity": "sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==", "requires": { "boolbase": "^1.0.0", - "css-what": "^3.2.1", - "domutils": "^1.7.0", - "nth-check": "^1.0.2" + "css-what": "^6.0.1", + "domhandler": "^4.3.1", + "domutils": "^2.8.0", + "nth-check": "^2.0.1" } }, - "css-what": { - "version": "3.4.2" - }, "dom-serializer": { - "version": "0.2.2", + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", + "integrity": "sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==", "requires": { "domelementtype": "^2.0.1", + "domhandler": "^4.2.0", "entities": "^2.0.0" } }, - "domutils": { - "version": "1.7.0", + "domhandler": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz", + "integrity": "sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==", "requires": { - "dom-serializer": "0", - "domelementtype": "1" - }, - "dependencies": { - "domelementtype": { - "version": "1.3.1" - } + "domelementtype": "^2.2.0" } }, - "js-yaml": { - "version": "3.14.1", + "domutils": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", + "integrity": "sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", "requires": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" + "dom-serializer": "^1.0.1", + "domelementtype": "^2.2.0", + "domhandler": "^4.2.0" } }, - "nth-check": { - "version": "1.0.2", - "requires": { - "boolbase": "~1.0.0" - } + "entities": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", + "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==" } } }, 
"swagger2openapi": { "version": "7.0.8", + "resolved": "https://registry.npmjs.org/swagger2openapi/-/swagger2openapi-7.0.8.tgz", + "integrity": "sha512-upi/0ZGkYgEcLeGieoz8gT74oWHA0E7JivX7aN9mAf+Tc7BQoRBvnIGHoPDw+f9TXTW4s6kGYCZJtauP6OYp7g==", "requires": { "call-me-maybe": "^1.0.1", "node-fetch": "^2.6.1", @@ -29425,21 +38394,68 @@ "yargs": "^17.0.1" }, "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "requires": { + "color-convert": "^2.0.1" + } + }, + "cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, "emoji-regex": { - "version": "8.0.0" + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "string-width": { "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, "yargs": { - "version": "17.4.1", + "version": "17.6.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.0.tgz", + "integrity": "sha512-8H/wTDqlSwoSnScvV2N/JHfLWOKuh5MVla9hqLjK3nsfyy6Y4kDSYSvkU5YCUEPOSnRXfIyx3Sq+B/IWudTo4g==", "requires": { - "cliui": "^7.0.2", + "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", @@ -29449,19 +38465,36 @@ } }, "yargs-parser": { - "version": "21.0.1" + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==" } } }, + "swiper": { + "version": "8.4.4", + "resolved": "https://registry.npmjs.org/swiper/-/swiper-8.4.4.tgz", + "integrity": "sha512-jA/8BfOZwT8PqPSnMX0TENZYitXEhNa7ZSNj1Diqh5LZyUJoBQaZcqAiPQ/PIg1+IPaRn/V8ZYVb0nxHMh51yw==", + "requires": { + "dom7": "^4.0.4", + "ssr-window": "^4.0.2" + } + }, "symbol-tree": { "version": "3.2.4", + "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", + "integrity": 
"sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", "dev": true }, "tapable": { - "version": "2.2.1" + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==" }, "tar-fs": { "version": "2.1.1", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", + "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==", "requires": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", @@ -29471,6 +38504,8 @@ }, "tar-stream": { "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", "requires": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", @@ -29481,6 +38516,8 @@ }, "terminal-link": { "version": "2.1.1", + "resolved": "https://registry.npmjs.org/terminal-link/-/terminal-link-2.1.1.tgz", + "integrity": "sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==", "dev": true, "requires": { "ansi-escapes": "^4.2.1", @@ -29488,54 +38525,39 @@ } }, "terser": { - "version": "5.13.1", + "version": "5.15.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.15.1.tgz", + "integrity": "sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw==", "requires": { + "@jridgewell/source-map": "^0.3.2", "acorn": "^8.5.0", "commander": "^2.20.0", - "source-map": "~0.8.0-beta.0", "source-map-support": "~0.5.20" }, "dependencies": { "commander": { - "version": "2.20.3" - }, - "source-map": { - "version": "0.8.0-beta.0", - "requires": { - "whatwg-url": "^7.0.0" - } - }, - "tr46": { - "version": "1.0.1", - "requires": { - "punycode": "^2.1.0" - } - }, - "webidl-conversions": { - "version": "4.0.2" - }, - "whatwg-url": { - "version": "7.1.0", - "requires": { - "lodash.sortby": "^4.7.0", - "tr46": "^1.0.1", - "webidl-conversions": "^4.0.2" - } + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" } } }, "terser-webpack-plugin": { - "version": "5.3.1", + "version": "5.3.6", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz", + "integrity": "sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ==", "requires": { + "@jridgewell/trace-mapping": "^0.3.14", "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.0", - "source-map": "^0.6.1", - "terser": "^5.7.2" + "terser": "^5.14.1" }, "dependencies": { "schema-utils": { "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", "requires": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", @@ -29546,6 +38568,8 @@ }, "test-exclude": { "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", "dev": true, "requires": { "@istanbuljs/schema": "^0.1.2", @@ -29554,100 +38578,166 @@ } }, "text-table": { - "version": 
"0.2.0" + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" }, "throat": { "version": "6.0.1", + "resolved": "https://registry.npmjs.org/throat/-/throat-6.0.1.tgz", + "integrity": "sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w==", + "dev": true + }, + "throttleit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", + "integrity": "sha512-rkTVqu6IjfQ/6+uNuuc3sZek4CEYxTJom3IktzgdSxcZqdARuebbA/f4QmAxMQIxqq9ZLEUkSYqvuk1I6VKq4g==", + "dev": true + }, + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", "dev": true }, "thunky": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", + "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, "timers-browserify": { "version": "2.0.12", + "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.12.tgz", + "integrity": "sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==", "requires": { "setimmediate": "^1.0.4" } }, "timm": { - "version": "1.7.1" + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/timm/-/timm-1.7.1.tgz", + "integrity": "sha512-IjZc9KIotudix8bMaBW6QvMuq64BrJWFs1+4V0lXwWGQZwH+LnX87doAYhem4caOEusRP9/g6jVDQmZ8XOk1nw==" }, "tiny-invariant": { - "version": "1.2.0" + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.1.tgz", + "integrity": "sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw==" }, "tiny-warning": { - "version": "1.0.3" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", + "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" }, "tinycolor2": { - "version": "1.4.2" + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/tinycolor2/-/tinycolor2-1.4.2.tgz", + "integrity": "sha512-vJhccZPs965sV/L2sU4oRQVAos0pQXwsvTLkWYdqJ+a8Q5kPFzJTuOFwy7UniPli44NKQGAglksjvOcpo95aZA==" + }, + "tmp": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", + "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "dev": true, + "requires": { + "rimraf": "^3.0.0" + } }, "tmpl": { "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", "dev": true }, "to-fast-properties": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==" }, "to-readable-stream": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz", + "integrity": "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==" }, 
"to-regex-range": { "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "requires": { "is-number": "^7.0.0" } }, "toidentifier": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" }, "totalist": { - "version": "1.1.0" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-1.1.0.tgz", + "integrity": "sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g==" }, "tough-cookie": { - "version": "4.0.0", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", "dev": true, "requires": { - "psl": "^1.1.33", - "punycode": "^2.1.1", - "universalify": "^0.1.2" - }, - "dependencies": { - "universalify": { - "version": "0.1.2", - "dev": true - } + "psl": "^1.1.28", + "punycode": "^2.1.1" } }, "tr46": { "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz", + "integrity": "sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==", "dev": true, "requires": { "punycode": "^2.1.1" } }, "trim": { - "version": "0.0.1" + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz", + "integrity": "sha512-YzQV+TZg4AxpKxaTHK3c3D+kRDCGVEE7LemdlQZoQXn0iennk10RsIoY6ikzAqJTc9Xjl9C1/waHom/J86ziAQ==" }, "trim-trailing-lines": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz", + "integrity": "sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ==" }, "trough": { - "version": "1.0.5" + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", + "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==" }, "tslib": { - "version": "2.4.0" + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" }, "tty-browserify": { - "version": "0.0.1" + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz", + "integrity": "sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw==" }, "tunnel-agent": { "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", "requires": { "safe-buffer": "^5.0.1" } }, + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", + "dev": true + }, "type-check": { "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", 
"dev": true, "requires": { "prelude-ls": "~1.1.2" @@ -29655,13 +38745,19 @@ }, "type-detect": { "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", "dev": true }, "type-fest": { - "version": "2.12.2" + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", + "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==" }, "type-is": { "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", "requires": { "media-typer": "0.3.0", "mime-types": "~2.1.24" @@ -29669,19 +38765,27 @@ }, "typedarray-to-buffer": { "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", "requires": { "is-typedarray": "^1.0.0" } }, "typescript": { - "version": "4.6.4", + "version": "4.8.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.4.tgz", + "integrity": "sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ==", "peer": true }, "ua-parser-js": { - "version": "0.7.31" + "version": "0.7.32", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.32.tgz", + "integrity": "sha512-f9BESNVhzlhEFf2CHMSj40NWOjYPl1YKYbrvIr/hFTDEmLq7SRbWvm7FcdcpCYT95zrOhC7gZSxjdnnTpBcwVw==" }, "unbox-primitive": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", "requires": { "call-bind": "^1.0.2", "has-bigints": "^1.0.2", @@ -29691,29 +38795,41 @@ }, "unherit": { "version": "1.1.3", + "resolved": "https://registry.npmjs.org/unherit/-/unherit-1.1.3.tgz", + "integrity": "sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ==", "requires": { "inherits": "^2.0.0", "xtend": "^4.0.0" } }, "unicode-canonical-property-names-ecmascript": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", + "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==" }, "unicode-match-property-ecmascript": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", + "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", "requires": { "unicode-canonical-property-names-ecmascript": "^2.0.0", "unicode-property-aliases-ecmascript": "^2.0.0" } }, "unicode-match-property-value-ecmascript": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz", + "integrity": "sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==" }, "unicode-property-aliases-ecmascript": { - "version": "2.0.0" + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz", + "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==" }, "unified": { "version": "9.2.0", + "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz", + "integrity": "sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg==", "requires": { "bail": "^1.0.0", "extend": "^3.0.0", @@ -29725,42 +38841,68 @@ }, "unique-string": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", "requires": { "crypto-random-string": "^2.0.0" } }, "unist-builder": { - "version": "2.0.3" + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz", + "integrity": "sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw==" + }, + "unist-util-find-after": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unist-util-find-after/-/unist-util-find-after-3.0.0.tgz", + "integrity": "sha512-ojlBqfsBftYXExNu3+hHLfJQ/X1jYY/9vdm4yZWjIbf0VuWF6CRufci1ZyoD/wV2TYMKxXUoNuoqwy+CkgzAiQ==", + "requires": { + "unist-util-is": "^4.0.0" + } }, "unist-util-generated": { - "version": "1.1.6" + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.6.tgz", + "integrity": "sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg==" }, "unist-util-is": { - "version": "4.1.0" + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", + "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==" }, "unist-util-position": { - "version": "3.1.0" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz", + "integrity": "sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==" }, "unist-util-remove": { "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz", + "integrity": "sha512-J8NYPyBm4baYLdCbjmf1bhPu45Cr1MWTm77qd9istEkzWpnN6O9tMsEbB2JhNnBCqGENRqEWomQ+He6au0B27Q==", "requires": { "unist-util-is": "^4.0.0" } }, "unist-util-remove-position": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz", + "integrity": "sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA==", "requires": { "unist-util-visit": "^2.0.0" } }, "unist-util-stringify-position": { "version": "2.0.3", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz", + "integrity": "sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==", "requires": { "@types/unist": "^2.0.2" } }, "unist-util-visit": { "version": "2.0.3", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", + "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", "requires": { "@types/unist": "^2.0.0", "unist-util-is": "^4.0.0", @@ -29769,22 +38911,42 @@ }, "unist-util-visit-parents": { "version": 
"3.1.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz", + "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==", "requires": { "@types/unist": "^2.0.0", "unist-util-is": "^4.0.0" } }, "universalify": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" }, "unpipe": { - "version": "1.0.0" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==" + }, + "untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "dev": true }, - "unquote": { - "version": "1.1.1" + "update-browserslist-db": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz", + "integrity": "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==", + "requires": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + } }, "update-notifier": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz", + "integrity": "sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw==", "requires": { "boxen": "^5.0.0", "chalk": "^4.1.0", @@ -29804,12 +38966,16 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { "color-convert": "^2.0.1" } }, "boxen": { "version": "5.1.2", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz", + "integrity": "sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==", "requires": { "ansi-align": "^3.0.0", "camelcase": "^6.2.0", @@ -29823,31 +38989,58 @@ }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, + "ci-info": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==" + }, "cli-boxes": { - "version": "2.2.1" + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz", + "integrity": "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==" }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "emoji-regex": { - "version": "8.0.0" + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "has-flag": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" + }, + "is-ci": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", + "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", + "requires": { + "ci-info": "^2.0.0" + } }, "string-width": { "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -29856,21 +39049,29 @@ }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "requires": { "has-flag": "^4.0.0" } }, "type-fest": { - "version": "0.20.2" + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==" }, "widest-line": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", + "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", "requires": { "string-width": "^4.0.0" } }, "wrap-ansi": { "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "requires": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -29881,27 +39082,37 @@ }, "uri-js": { "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "requires": { "punycode": "^2.1.0" } }, "url": { "version": "0.11.0", + "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", + "integrity": "sha512-kbailJa29QrtXnxgq+DdCEGlbTeYM2eJUxsz6vjZavrCYPMIFHMKQmSKYAIuUK2i7hgPm28a8piX5NTUtM/LKQ==", "requires": { "punycode": "1.3.2", "querystring": "0.2.0" }, "dependencies": { "punycode": { - "version": "1.3.2" + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==" }, "querystring": { - "version": "0.2.0" + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==" } } }, "url-loader": { "version": "4.1.1", + "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-4.1.1.tgz", + "integrity": 
"sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==", "requires": { "loader-utils": "^2.0.0", "mime-types": "^2.1.27", @@ -29910,6 +39121,8 @@ "dependencies": { "schema-utils": { "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", "requires": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", @@ -29918,72 +39131,98 @@ } } }, + "url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "requires": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, "url-parse-lax": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", + "integrity": "sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ==", "requires": { "prepend-http": "^2.0.0" } }, "url-template": { - "version": "2.0.8" + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/url-template/-/url-template-2.0.8.tgz", + "integrity": "sha512-XdVKMF4SJ0nP/O7XIPB0JwAEuT9lDIYnNsK8yGVe43y0AWoKeJNdv3ZNWh7ksJ6KqQFjOO6ox/VEitLnaVNufw==" }, "use-composed-ref": { "version": "1.3.0", + "resolved": "https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.3.0.tgz", + "integrity": "sha512-GLMG0Jc/jiKov/3Ulid1wbv3r54K9HlMW29IWcDFPEqFkSO2nS0MuefWgMJpeHQ9YJeXDL3ZUF+P3jdXlZX/cQ==", "requires": {} }, "use-isomorphic-layout-effect": { "version": "1.1.2", + "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz", + "integrity": "sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA==", "requires": {} }, "use-latest": { "version": "1.2.1", + "resolved": "https://registry.npmjs.org/use-latest/-/use-latest-1.2.1.tgz", + "integrity": "sha512-xA+AVm/Wlg3e2P/JiItTziwS7FK92LWrDB0p+hgXloIMuVCeJJ8v6f0eeHyPZaJrM+usM1FkFfbNCrJGs8A/zw==", "requires": { "use-isomorphic-layout-effect": "^1.1.1" } }, "utif": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/utif/-/utif-2.0.1.tgz", + "integrity": "sha512-Z/S1fNKCicQTf375lIP9G8Sa1H/phcysstNrrSdZKj1f9g58J4NMgb5IgiEZN9/nLMPDwF0W7hdOe9Qq2IYoLg==", "requires": { "pako": "^1.0.5" } }, "util": { - "version": "0.12.4", + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", "requires": { "inherits": "^2.0.3", "is-arguments": "^1.0.4", "is-generator-function": "^1.0.7", "is-typed-array": "^1.1.3", - "safe-buffer": "^5.1.2", "which-typed-array": "^1.1.2" } }, "util-deprecate": { - "version": "1.0.2" - }, - "util.promisify": { - "version": "1.0.1", - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.2", - "has-symbols": "^1.0.1", - "object.getownpropertydescriptors": "^2.1.0" - } + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "utila": { - "version": "0.4.0" + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz", + "integrity": 
"sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==" }, "utility-types": { - "version": "3.10.0" + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/utility-types/-/utility-types-3.10.0.tgz", + "integrity": "sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg==" }, "utils-merge": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" }, "uuid": { - "version": "7.0.3" + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" }, "v8-to-istanbul": { "version": "8.1.1", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz", + "integrity": "sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w==", "dev": true, "requires": { "@types/istanbul-lib-coverage": "^2.0.1", @@ -29992,19 +39231,38 @@ }, "dependencies": { "source-map": { - "version": "0.7.3", + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", "dev": true } } }, "value-equal": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", + "integrity": "sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==" }, "vary": { - "version": "1.1.2" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==" + }, + "verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", + "dev": true, + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } }, "vfile": { "version": "4.2.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.2.1.tgz", + "integrity": "sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==", "requires": { "@types/unist": "^2.0.0", "is-buffer": "^2.0.0", @@ -30013,20 +39271,28 @@ } }, "vfile-location": { - "version": "3.2.0" + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-3.2.0.tgz", + "integrity": "sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA==" }, "vfile-message": { "version": "2.0.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz", + "integrity": "sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==", "requires": { "@types/unist": "^2.0.0", "unist-util-stringify-position": "^2.0.0" } }, "vm-browserify": { - "version": "1.1.2" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz", + "integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==" }, "w3c-hr-time": { "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz", + "integrity": "sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==", "dev": true, "requires": { "browser-process-hrtime": "^1.0.0" @@ -30034,6 +39300,8 @@ }, "w3c-xmlserializer": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz", + "integrity": "sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA==", "dev": true, "requires": { "xml-name-validator": "^3.0.0" @@ -30041,23 +39309,39 @@ }, "wait-on": { "version": "6.0.1", + "resolved": "https://registry.npmjs.org/wait-on/-/wait-on-6.0.1.tgz", + "integrity": "sha512-zht+KASY3usTY5u2LgaNqn/Cd8MukxLGjdcZxT2ns5QzDmTFc4XoWBgC+C/na+sMRZTuVygQoMYwdcVjHnYIVw==", "requires": { "axios": "^0.25.0", "joi": "^17.6.0", "lodash": "^4.17.21", "minimist": "^1.2.5", "rxjs": "^7.5.4" + }, + "dependencies": { + "axios": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz", + "integrity": "sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g==", + "requires": { + "follow-redirects": "^1.14.7" + } + } } }, "walker": { "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", "dev": true, "requires": { "makeerror": "1.0.12" } }, "watchpack": { - "version": "2.3.1", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz", + "integrity": "sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==", "requires": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" @@ -30065,61 +39349,70 @@ }, "wbuf": { "version": "1.7.3", + "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", + "integrity": "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==", "requires": { "minimalistic-assert": "^1.0.0" } }, "web-namespaces": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz", + "integrity": "sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==" }, "webidl-conversions": { "version": "6.1.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-6.1.0.tgz", + "integrity": "sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==", "dev": true }, "webpack": { - "version": "5.72.0", + "version": "5.74.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.74.0.tgz", + "integrity": "sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA==", "requires": { "@types/eslint-scope": "^3.7.3", "@types/estree": "^0.0.51", "@webassemblyjs/ast": "1.11.1", "@webassemblyjs/wasm-edit": "1.11.1", "@webassemblyjs/wasm-parser": "1.11.1", - "acorn": "^8.4.1", + "acorn": "^8.7.1", "acorn-import-assertions": "^1.7.6", "browserslist": "^4.14.5", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.9.2", + "enhanced-resolve": "^5.10.0", "es-module-lexer": "^0.9.0", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.9", - "json-parse-better-errors": "^1.0.2", + "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", 
"neo-async": "^2.6.2", "schema-utils": "^3.1.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.1.3", - "watchpack": "^2.3.1", + "watchpack": "^2.4.0", "webpack-sources": "^3.2.3" }, "dependencies": { "schema-utils": { "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", "requires": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", "ajv-keywords": "^3.5.2" } - }, - "webpack-sources": { - "version": "3.2.3" } } }, "webpack-bundle-analyzer": { - "version": "4.5.0", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.6.1.tgz", + "integrity": "sha512-oKz9Oz9j3rUciLNfpGFjOb49/jEpXNmWdVH8Ls//zNcnLlQdTGXQQMsBbb/gR7Zl8WNLxVCq+0Hqbx3zv6twBw==", "requires": { "acorn": "^8.0.4", "acorn-walk": "^8.0.0", @@ -30133,16 +39426,22 @@ }, "dependencies": { "acorn-walk": { - "version": "8.2.0" + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==" }, "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { "color-convert": "^2.0.1" } }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -30150,21 +39449,31 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "commander": { - "version": "7.2.0" + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==" }, "has-flag": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "requires": { "has-flag": "^4.0.0" } @@ -30172,10 +39481,12 @@ } }, "webpack-dev-middleware": { - "version": "5.3.1", + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz", + "integrity": "sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==", "requires": { "colorette": "^2.0.10", - "memfs": "^3.4.1", + "memfs": "^3.4.3", "mime-types": "^2.1.31", 
"range-parser": "^1.2.1", "schema-utils": "^4.0.0" @@ -30183,6 +39494,8 @@ "dependencies": { "ajv": { "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -30192,18 +39505,21 @@ }, "ajv-keywords": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "requires": { "fast-deep-equal": "^3.1.3" } }, - "colorette": { - "version": "2.0.16" - }, "range-parser": { - "version": "1.2.1" + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "schema-utils": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "requires": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", @@ -30214,12 +39530,15 @@ } }, "webpack-dev-server": { - "version": "4.8.1", + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.11.1.tgz", + "integrity": "sha512-lILVz9tAUy1zGFwieuaQtYiadImb5M3d+H+L1zDYalYoDl0cksAB1UNyuE5MMWJrG6zR1tXkCP2fitl7yoUJiw==", "requires": { "@types/bonjour": "^3.5.9", "@types/connect-history-api-fallback": "^1.3.5", "@types/express": "^4.17.13", "@types/serve-index": "^1.9.1", + "@types/serve-static": "^1.13.10", "@types/sockjs": "^0.3.33", "@types/ws": "^8.5.1", "ansi-html-community": "^0.0.8", @@ -30227,7 +39546,7 @@ "chokidar": "^3.5.3", "colorette": "^2.0.10", "compression": "^1.7.4", - "connect-history-api-fallback": "^1.6.0", + "connect-history-api-fallback": "^2.0.0", "default-gateway": "^6.0.3", "express": "^4.17.3", "graceful-fs": "^4.2.6", @@ -30236,12 +39555,11 @@ "ipaddr.js": "^2.0.1", "open": "^8.0.9", "p-retry": "^4.5.0", - "portfinder": "^1.0.28", "rimraf": "^3.0.2", "schema-utils": "^4.0.0", - "selfsigned": "^2.0.1", + "selfsigned": "^2.1.1", "serve-index": "^1.9.1", - "sockjs": "^0.3.21", + "sockjs": "^0.3.24", "spdy": "^4.0.2", "webpack-dev-middleware": "^5.3.1", "ws": "^8.4.2" @@ -30249,6 +39567,8 @@ "dependencies": { "ajv": { "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -30258,15 +39578,16 @@ }, "ajv-keywords": { "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", "requires": { "fast-deep-equal": "^3.1.3" } }, - "colorette": { - "version": "2.0.16" - }, "schema-utils": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "requires": { "@types/json-schema": "^7.0.9", "ajv": "^8.8.0", @@ -30275,27 +39596,31 @@ } }, "ws": { - "version": "8.6.0", + "version": "8.9.0", + 
"resolved": "https://registry.npmjs.org/ws/-/ws-8.9.0.tgz", + "integrity": "sha512-Ja7nszREasGaYUYCI2k4lCKIRTt+y7XuqVoHR44YpI49TtryyqbqvDMn5eqfW7e6HzTukDRIsXqzVHScqRcafg==", "requires": {} } } }, "webpack-merge": { "version": "5.8.0", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", + "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", "requires": { "clone-deep": "^4.0.1", "wildcard": "^2.0.0" } }, "webpack-sources": { - "version": "1.4.3", - "requires": { - "source-list-map": "^2.0.0", - "source-map": "~0.6.1" - } + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==" }, "webpackbar": { "version": "5.0.2", + "resolved": "https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.2.tgz", + "integrity": "sha512-BmFJo7veBDgQzfWXl/wwYXr/VFus0614qZ8i9znqcl9fnEdiVkdbi0TedLQ6xAK92HZHDJ0QmyQ0fmuZPAgCYQ==", "requires": { "chalk": "^4.1.0", "consola": "^2.15.3", @@ -30305,12 +39630,16 @@ "dependencies": { "ansi-styles": { "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { "color-convert": "^2.0.1" } }, "chalk": { "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -30318,18 +39647,26 @@ }, "color-convert": { "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { - "version": "1.1.4" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "has-flag": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "supports-color": { "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "requires": { "has-flag": "^4.0.0" } @@ -30338,6 +39675,8 @@ }, "websocket-driver": { "version": "0.7.4", + "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", + "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", "requires": { "http-parser-js": ">=0.5.1", "safe-buffer": ">=5.1.0", @@ -30345,10 +39684,14 @@ } }, "websocket-extensions": { - "version": "0.1.4" + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", + "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==" }, "whatwg-encoding": { "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz", + "integrity": "sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==", "dev": true, "requires": { "iconv-lite": "0.4.24" @@ -30356,10 +39699,14 @@ }, "whatwg-mimetype": { "version": "2.3.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz", + "integrity": "sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==", "dev": true }, "whatwg-url": { "version": "8.7.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.7.0.tgz", + "integrity": "sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg==", "dev": true, "requires": { "lodash": "^4.7.0", @@ -30369,12 +39716,16 @@ }, "which": { "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "requires": { "isexe": "^2.0.0" } }, "which-boxed-primitive": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", "requires": { "is-bigint": "^1.0.1", "is-boolean-object": "^1.1.0", @@ -30384,50 +39735,41 @@ } }, "which-typed-array": { - "version": "1.1.7", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.8.tgz", + "integrity": "sha512-Jn4e5PItbcAHyLoRDwvPj1ypu27DJbtdYXUa5zsinrUx77Uvfb0cXwwnGMTn7cjUfhhqgVQnVJCwF+7cgU7tpw==", "requires": { "available-typed-arrays": "^1.0.5", "call-bind": "^1.0.2", - "es-abstract": "^1.18.5", - "foreach": "^2.0.5", + "es-abstract": "^1.20.0", + "for-each": "^0.3.3", "has-tostringtag": "^1.0.0", - "is-typed-array": "^1.1.7" - } - }, - "wide-align": { - "version": "1.1.5", - "requires": { - "string-width": "^1.0.2 || 2 || 3 || 4" - }, - "dependencies": { - "emoji-regex": { - "version": "8.0.0" - }, - "string-width": { - "version": "4.2.3", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - } + "is-typed-array": "^1.1.9" } }, "widest-line": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz", + "integrity": "sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==", "requires": { "string-width": "^5.0.1" } }, "wildcard": { - "version": "2.0.0" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", + "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==" }, "word-wrap": { "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", "dev": true }, "wrap-ansi": { "version": "8.0.1", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.0.1.tgz", + "integrity": "sha512-QFF+ufAqhoYHvoHdajT/Po7KoXVBPXS2bgjIam5isfWJPfIOnQZ50JtUiVvCv/sjgacf3yRrt2ZKUZ/V4itN4g==", "requires": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", @@ -30435,13 +39777,19 @@ }, "dependencies": { "ansi-regex": { - "version": "6.0.1" + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + 
"integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==" }, "ansi-styles": { - "version": "6.1.0" + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==" }, "strip-ansi": { "version": "7.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.0.1.tgz", + "integrity": "sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==", "requires": { "ansi-regex": "^6.0.1" } @@ -30449,10 +39797,14 @@ } }, "wrappy": { - "version": "1.0.2" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "write-file-atomic": { "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", "requires": { "imurmurhash": "^0.1.4", "is-typedarray": "^1.0.0", @@ -30461,14 +39813,20 @@ } }, "ws": { - "version": "7.5.7", + "version": "7.5.9", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz", + "integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==", "requires": {} }, "xdg-basedir": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==" }, "xhr": { "version": "2.6.0", + "resolved": "https://registry.npmjs.org/xhr/-/xhr-2.6.0.tgz", + "integrity": "sha512-/eCGLb5rxjx5e3mF1A7s+pLlR6CGyqWN91fv1JgER5mVWg1MZmlhBvy9kjcsOdRk8RrIujotWyJamfyrp+WIcA==", "requires": { "global": "~4.4.0", "is-function": "^1.0.1", @@ -30478,48 +39836,72 @@ }, "xml-js": { "version": "1.6.11", + "resolved": "https://registry.npmjs.org/xml-js/-/xml-js-1.6.11.tgz", + "integrity": "sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==", "requires": { "sax": "^1.2.4" } }, "xml-name-validator": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-3.0.0.tgz", + "integrity": "sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==", "dev": true }, "xml-parse-from-string": { - "version": "1.0.1" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz", + "integrity": "sha512-ErcKwJTF54uRzzNMXq2X5sMIy88zJvfN2DmdoQvy7PAFJ+tPRU6ydWuOKNMyfmOjdyBQTFREi60s0Y0SyI0G0g==" }, "xml2js": { "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", "requires": { "sax": ">=0.6.0", "xmlbuilder": "~11.0.0" } }, "xmlbuilder": { - "version": "11.0.1" + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==" }, "xmlchars": { "version": "2.2.0", + "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", + "integrity": 
"sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", "dev": true }, "xtend": { - "version": "4.0.2" + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" }, "y18n": { - "version": "5.0.8" + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" }, "yallist": { - "version": "4.0.0" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "yaml": { - "version": "1.10.2" + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" }, "yaml-ast-parser": { - "version": "0.0.43" + "version": "0.0.43", + "resolved": "https://registry.npmjs.org/yaml-ast-parser/-/yaml-ast-parser-0.0.43.tgz", + "integrity": "sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A==" }, "yargs": { "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", "dev": true, "requires": { "cliui": "^7.0.2", @@ -30533,10 +39915,14 @@ "dependencies": { "emoji-regex": { "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true }, "string-width": { "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "requires": { "emoji-regex": "^8.0.0", @@ -30548,13 +39934,29 @@ }, "yargs-parser": { "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", "dev": true }, + "yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dev": true, + "requires": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, "yocto-queue": { - "version": "0.1.0" + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==" }, "zwitch": { - "version": "1.0.5" + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz", + "integrity": "sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==" } } } diff --git a/website/package.json b/website/package.json index 3cfdd2c1bfa..9b9f5bfbb96 100644 --- a/website/package.json +++ b/website/package.json @@ -11,16 +11,18 @@ "@docusaurus/preset-classic": "2.0.0-beta.17", "@docusaurus/theme-search-algolia": "2.0.0-beta.17", "@mdx-js/react": 
"^1.6.21", - "@svgr/webpack": "^5.5.0", + "@svgr/webpack": "^6.0.0", + "axios": "^0.27.2", "classnames": "^2.3.1", "clsx": "^1.1.1", "color": "^3.1.2", - "core-js": "^3.15.2", + "core-js": "^3.20.0", "file-loader": "^6.2.0", "fs": "0.0.2", "gray-matter": "^4.0.3", + "hast-util-is-element": "^1.1.0", "js-yaml": "^4.1.0", - "mobx": "^6.3.7", + "mobx": "^6.3.9", "node-polyfill-webpack-plugin": "^1.1.4", "prism-react-renderer": "^1.2.1", "react": "^17.0.1", @@ -28,8 +30,11 @@ "react-is": "^18.1.0", "react-tooltip": "^4.2.21", "redoc": "^2.0.0-rc.57", + "rehype-katex": "^5.0.0", + "remark-math": "^3.0.1", "slugify": "^1.6.1", "styled-components": "5.3.3", + "swiper": "^8.4.3", "url-loader": "^4.1.1" }, "devDependencies": { @@ -39,6 +44,7 @@ "@testing-library/react": "^12.1.3", "@testing-library/user-event": "^13.5.0", "css-loader": "^3.4.2", + "cypress": "^10.3.0", "dotenv": "^10.0.0", "jest": "^27.5.1", "jest-cli": "^27.5.1", @@ -56,7 +62,10 @@ "testEnvironment": "jsdom", "setupFilesAfterEnv": [ "/jest.setup.js" - ] + ], + "moduleNameMapper": { + "\\.css$": "/static/css/__mocks__/styleMock.js" + } }, "version": "0.0.0" } diff --git a/website/plugins/buildGlobalData/index.js b/website/plugins/buildGlobalData/index.js index 83fee90e9fd..cabeed1cf05 100644 --- a/website/plugins/buildGlobalData/index.js +++ b/website/plugins/buildGlobalData/index.js @@ -18,14 +18,14 @@ module.exports = function buildGlobalDataPlugin(context, options) { // Get custom blog metadata const blogMeta = yaml.load(fs.readFileSync(`blog/metadata.yml`, { encoding: 'utf8' })) - + // Get CTA data const CTAData = yaml.load(fs.readFileSync(`blog/ctas.yml`, { encoding: 'utf8' })) - + // Get versionedPages param // This controls versioning for sidebar const { versionedPages } = options - + // Get all FAQ doc ids // FAQ component uses these to pull file const faqFiles = getDirectoryFiles(`docs/faqs`) @@ -38,7 +38,7 @@ module.exports = function buildGlobalDataPlugin(context, options) { faqFiles } }, - async contentLoaded({content, actions}) { + async contentLoaded({ content, actions }) { const { setGlobalData } = actions; setGlobalData(content); }, diff --git a/website/sidebars.js b/website/sidebars.js index 407393c35b6..87664aabcd8 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -1,307 +1,385 @@ const sidebarSettings = { docs: [ + "docs/introduction", + "docs/supported-data-platforms", { type: "category", - label: "Introduction", - items: ["docs/introduction", "docs/available-adapters", "docs/core-versions"], + label: "Get started with dbt", + collapsed: true, + items: [ + "docs/get-started/getting-started/overview", + { + type: "category", + label: "Get started with dbt Cloud", + collapsed: true, + items: [ + "docs/get-started/getting-started/set-up-dbt-cloud", + { + type: "category", + label: "Getting set up", + items: [ + "docs/get-started/getting-started/getting-set-up/setting-up-bigquery", + "docs/get-started/getting-started/getting-set-up/setting-up-databricks", + "docs/get-started/getting-started/getting-set-up/setting-up-redshift", + "docs/get-started/getting-started/getting-set-up/setting-up-snowflake", + ], + }, + { + type: "category", + label: "Building your first project", + + items: [ + "docs/get-started/getting-started/building-your-first-project/build-your-first-models", + "docs/get-started/getting-started/building-your-first-project/test-and-document-your-project", + "docs/get-started/getting-started/building-your-first-project/schedule-a-job", + ], + }, + { + type: "category", + label: "Learning 
more", + items: [ + "docs/get-started/learning-more/using-jinja", + "docs/get-started/learning-more/refactoring-legacy-sql", + ], + }, + "docs/get-started/dbt-cloud-features", + "docs/get-started/connect-your-database", + "docs/get-started/develop-in-the-cloud", + "docs/get-started/dbt-cloud-tips", + ], + }, + { + type: "category", + label: "Get started with dbt Core", + collapsed: true, + items: [ + "docs/get-started/getting-started-dbt-core", + { + type: "category", + label: "Install dbt Core", + collapsed: true, + items: [ + "docs/get-started/installation", + "docs/get-started/homebrew-install", + "docs/get-started/pip-install", + "docs/get-started/docker-install", + "docs/get-started/source-install", + ], + }, + "docs/get-started/about-the-cli", + "docs/get-started/connection-profiles", + ], + }, + "docs/get-started/run-your-dbt-projects", + ], }, { type: "category", - label: "Building a dbt Project", - collapsed: false, + label: "Build dbt projects", + collapsed: true, items: [ - "docs/building-a-dbt-project/projects", + "docs/build/projects", { type: "category", - label: "Models", + label: "Build your DAG", + collapsed: true, items: [ - "docs/building-a-dbt-project/building-models", - "docs/building-a-dbt-project/building-models/materializations", - "docs/building-a-dbt-project/building-models/configuring-incremental-models", - "docs/building-a-dbt-project/building-models/using-custom-aliases", - "docs/building-a-dbt-project/building-models/using-custom-schemas", - "docs/building-a-dbt-project/building-models/using-custom-databases", - "docs/building-a-dbt-project/building-models/using-variables", + "docs/build/sources", + { + type: "category", + label: "Models", + items: [ + "docs/build/models", + "docs/build/sql-models", + "docs/build/python-models", + ], + }, + "docs/build/seeds", + "docs/build/snapshots", + "docs/build/exposures", + "docs/build/metrics", + ], + }, + { + type: "category", + label: "Enhance your models", + collapsed: true, + items: [ + "docs/build/tests", + "docs/build/materializations", + "docs/build/incremental-models", + ], + }, + { + type: "category", + label: "Enhance your code", + collapsed: true, + items: [ + "docs/build/jinja-macros", + "docs/build/project-variables", + "docs/build/environment-variables", + "docs/build/packages", + "docs/build/analyses", + ], + }, + { + type: "category", + label: "Organize your outputs", + collapsed: true, + items: [ + "docs/build/custom-schemas", + "docs/build/custom-databases", + "docs/build/custom-aliases", + "docs/build/custom-target-names", + ], + }, + { + type: "category", + label: "Advanced workflows", + collapsed: true, + items: [ + { + type: "category", + label: "dbt Cloud APIs", + items: [ + "docs/dbt-cloud-apis/overview", + { + type: "category", + label: "Authentication", + items: [ + "docs/dbt-cloud-apis/user-tokens", + "docs/dbt-cloud-apis/service-tokens", + ], + }, + "docs/dbt-cloud-apis/admin-cloud-api", + { + type: "category", + label: "Metadata API", + items: [ + "docs/dbt-cloud-apis/metadata-api", + "docs/dbt-cloud-apis/metadata-querying", + { + type: "category", + label: "Schema", + items: [ + "docs/dbt-cloud-apis/metadata-schema-model", + "docs/dbt-cloud-apis/metadata-schema-models", + "docs/dbt-cloud-apis/metadata-schema-modelByEnv", + "docs/dbt-cloud-apis/metadata-schema-metric", + "docs/dbt-cloud-apis/metadata-schema-metrics", + "docs/dbt-cloud-apis/metadata-schema-source", + "docs/dbt-cloud-apis/metadata-schema-sources", + "docs/dbt-cloud-apis/metadata-schema-seed", + 
"docs/dbt-cloud-apis/metadata-schema-seeds", + "docs/dbt-cloud-apis/metadata-schema-snapshots", + "docs/dbt-cloud-apis/metadata-schema-test", + "docs/dbt-cloud-apis/metadata-schema-tests", + "docs/dbt-cloud-apis/metadata-schema-exposure", + "docs/dbt-cloud-apis/metadata-schema-exposures", + ], + }, + ], + }, + ], + }, + "docs/build/hooks-operations", ], }, - "docs/building-a-dbt-project/tests", - "docs/building-a-dbt-project/documentation", - "docs/building-a-dbt-project/using-sources", - "docs/building-a-dbt-project/seeds", - "docs/building-a-dbt-project/snapshots", - "docs/building-a-dbt-project/exposures", - "docs/building-a-dbt-project/jinja-macros", - "docs/building-a-dbt-project/hooks-operations", //"docs/building-a-dbt-project/dont-nest-your-curlies", //"docs/building-a-dbt-project/archival", - "docs/building-a-dbt-project/package-management", - "docs/building-a-dbt-project/analyses", - "docs/building-a-dbt-project/metrics", ], }, { type: "category", - label: "Running a dbt Project", - collapsed: false, + label: "Deploy dbt projects", + collapsed: true, items: [ - "docs/running-a-dbt-project/using-the-dbt-ide", - "docs/running-a-dbt-project/using-the-cli", - "docs/running-a-dbt-project/dbt-api", - "docs/running-a-dbt-project/running-dbt-in-production", + "docs/deploy/deployments", + "docs/deploy/regions", + { + type: "category", + label: "dbt Cloud deploy options", + items: [ + "docs/deploy/architecture", + "docs/deploy/single-tenant", + "docs/deploy/multi-tenant", + ], + }, + { + type: "category", + label: "dbt Cloud production jobs", + items: [ + "docs/deploy/job-triggers", + "docs/deploy/job-notifications", + "docs/deploy/source-freshness", + "docs/deploy/dashboard-status-tiles", + ], + }, + "docs/deploy/cloud-ci-job", ], }, { type: "category", - label: "Guides", + label: "Collaborate with others", items: [ - "docs/guides/debugging-errors", - "docs/guides/debugging-schema-names", - "docs/guides/navigating-the-docs", - "docs/guides/getting-help", - "docs/guides/best-practices", - "docs/guides/managing-environments", - "docs/guides/writing-custom-generic-tests", - "docs/guides/building-packages", - "docs/guides/creating-new-materializations", - "docs/guides/understanding-state", - { - type: "category", - label: "Migration guides", - link: { - type: 'generated-index', - title: 'Migration guides', - description: 'Learn how to upgrade to the latest version of dbt Core.', - slug: '/docs/guides/migration-guide', - }, + "docs/collaborate/environments", + { + type: "category", + label: "Git version control", items: [ - "docs/guides/migration-guide/upgrading-to-v1.1", - "docs/guides/migration-guide/upgrading-to-v1.0", - "docs/guides/migration-guide/upgrading-to-v0.21", - "docs/guides/migration-guide/upgrading-to-v0.20", + "docs/collaborate/git-version-control", + "docs/collaborate/git/version-control-basics", + "docs/collaborate/git/managed-repository", + "docs/collaborate/git/pr-template", + "docs/collaborate/git/resolve-merge-conflicts", { type: "category", - label: "Older versions", - link: { - type: 'generated-index', - title: 'Migration guides', - description: 'Learn how to upgrade from older versions of dbt Core.', - slug: '/docs/guides/migration-guide/older-versions', - }, + label: "Supported git providers", items: [ - "docs/guides/migration-guide/upgrading-to-0-19-0", - "docs/guides/migration-guide/upgrading-to-0-18-0", - "docs/guides/migration-guide/upgrading-to-0-17-0", - "docs/guides/migration-guide/upgrading-to-0-16-0", - "docs/guides/migration-guide/upgrading-to-0-15-0", - 
"docs/guides/migration-guide/upgrading-to-0-14-1", - "docs/guides/migration-guide/upgrading-to-0-14-0", - "docs/guides/migration-guide/upgrading-to-0-13-0", - "docs/guides/migration-guide/upgrading-to-0-12-0", - "docs/guides/migration-guide/upgrading-to-0-11-0", + "docs/collaborate/git/connect-github", + "docs/collaborate/git/connect-gitlab", + { + type: "category", + label: "Azure DevOps", + items: [ + "docs/collaborate/git/connect-azure-devops", + "docs/collaborate/git/setup-azure", + "docs/collaborate/git/authenticate-azure", + ], + }, + "docs/collaborate/git/import-a-project-by-git-url", ], }, ], }, - "docs/guides/videos", + { + type: "category", + label: "Document your dbt projects", + items: [ + "docs/collaborate/documentation", + "docs/collaborate/build-and-view-your-docs", + ], + }, + + { + type: "category", + label: "Manage access", + items: [ + "docs/collaborate/manage-access/about-access", + "docs/collaborate/manage-access/seats-and-users", + { + type: "category", + label: "Permissions", + items: [ + "docs/collaborate/manage-access/self-service-permissions", + "docs/collaborate/manage-access/enterprise-permissions", + ], + }, + + { + type: "category", + label: "Single sign-on", + items: [ + "docs/collaborate/manage-access/sso-overview", + "docs/collaborate/manage-access/set-up-sso-saml-2.0", + "docs/collaborate/manage-access/set-up-sso-google-workspace", + "docs/collaborate/manage-access/set-up-sso-azure-active-directory", + ], + }, // SSO + { + type: "category", + label: "OAuth with data platforms", + items: [ + "docs/collaborate/manage-access/set-up-snowflake-oauth", + "docs/collaborate/manage-access/set-up-bigquery-oauth", + ], + }, // oauth + "docs/collaborate/manage-access/audit-log", + ], + }, // Manage access ], }, { type: "category", - label: "Contributing", + label: "Use the dbt Semantic Layer", + collapsed: true, items: [ - "docs/contributing/oss-expectations", - "docs/contributing/contributor-license-agreements", - "docs/contributing/building-a-new-adapter", - "docs/contributing/testing-a-new-adapter", - "docs/contributing/documenting-a-new-adapter", - "docs/contributing/slack-rules-of-the-road", + "docs/use-dbt-semantic-layer/quickstart-semantic-layer", + "docs/use-dbt-semantic-layer/dbt-semantic-layer", + "docs/use-dbt-semantic-layer/setup-dbt-semantic-layer", + "docs/use-dbt-semantic-layer/avail-sl-integrations", ], }, { type: "category", - label: "About", - items: ["docs/about/license", "docs/about/viewpoint"], + label: "Available dbt versions", + items: [ + "docs/dbt-versions/core", + "docs/dbt-versions/upgrade-core-in-cloud", + "docs/dbt-versions/product-lifecycles", + { + type: "category", + label: "dbt Cloud Release Notes", + items: [ + "docs/dbt-versions/dbt-cloud-release-notes", + { + type: "autogenerated", + dirName: "docs/dbt-versions/release-notes", + }, + ], + }, + ], }, - ], - "dbt CLI": [ - "dbt-cli/cli-overview", + "docs/dbt-support", { type: "category", - label: "Installing dbt from the command line", + label: "Frequently asked questions", + link: { + type: "generated-index", + title: "Frequently asked questions", + description: + "Our Frequently Asked Questions (FAQs) section is a space where you can find an answer to some questions we get asked a lot (but that we’re happy to answer!). 
If you have a question or are still stuck on something, just reach out to us by emailing support@getdbt.com or clicking on the chat widget, and we’ll do our best to help out.", + slug: "/docs/faqs", + }, items: [ - "dbt-cli/install/overview", - "dbt-cli/install/homebrew", - "dbt-cli/install/pip", - "dbt-cli/install/docker", - "dbt-cli/install/from-source", + { + type: "autogenerated", + dirName: "faqs", + }, ], }, - "dbt-cli/configure-your-profile", ], "dbt Cloud": [ { type: "category", label: "Overview", - link: { type: 'doc', id: 'docs/dbt-cloud/cloud-overview' }, - items: [ - 'docs/dbt-cloud/cloud-quickstart', - ], + link: { type: "doc", id: "docs/dbt-cloud/cloud-overview" }, + items: [], }, { type: "category", label: "dbt Cloud IDE", - items: [ - "docs/dbt-cloud/cloud-ide/the-dbt-ide", - "docs/dbt-cloud/cloud-ide/handling-merge-conflicts", - "docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide", - "docs/dbt-cloud/cloud-ide/the-ide-git-button", - ], + items: ["docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide"], }, { type: "category", label: "Configuring dbt Cloud", items: [ - "docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database", - "docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository", - "docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories", - "docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application", - "docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-gitlab", - { - type: "category", - label: "Connecting Azure DevOps", - link: { type: 'doc', id: 'docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-azure-devops' }, - items: [ - 'docs/dbt-cloud/cloud-configuring-dbt-cloud/setup-azure', - 'docs/dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure', - ], - }, - "docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url", "docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version", - "docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions", ], }, { type: "category", label: "Using dbt Cloud", link: { - type: 'generated-index', - title: 'Using dbt Cloud', - description: 'Learn how you can use dbt Cloud.', - slug: '/docs/dbt-cloud', + type: "generated-index", + title: "Using dbt Cloud", + description: "Learn how you can use dbt Cloud.", + slug: "/docs/dbt-cloud", }, items: [ - "docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration", - "docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation", - "docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness", "docs/dbt-cloud/using-dbt-cloud/artifacts", - "docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule", - "docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name", - "docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables", - "docs/dbt-cloud/using-dbt-cloud/cloud-notifications", - "docs/dbt-cloud/using-dbt-cloud/cloud-dashboard-status-tiles", "docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab", - "docs/dbt-cloud/using-dbt-cloud/cloud-metrics-layer", - ], - }, - { - type: "category", - label: "Access Control", - items: [ - "docs/dbt-cloud/access-control/access-control-overview", - "docs/dbt-cloud/access-control/cloud-seats-and-users", - "docs/dbt-cloud/access-control/self-service-permissions", - "docs/dbt-cloud/access-control/enterprise-permissions", - ], - }, - { - type: "category", - label: "dbt Cloud Enterprise", - items: [ - "docs/dbt-cloud/dbt-cloud-enterprise", - { - type: "category", - label: "Single Sign On", - items: [ - 
"docs/dbt-cloud/dbt-cloud-enterprise/sso-overview", - "docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-saml-2.0", - "docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite", - "docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory", - ], - }, - { - type: "category", - label: "Database Auth", - items: [ - "docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-snowflake-oauth", - "docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-setting-up-bigquery-oauth", - ], - }, - "docs/dbt-cloud/dbt-cloud-enterprise/audit-log", - ], - }, - { - type: "category", - label: "dbt Cloud Deployments", - items: [ - "docs/dbt-cloud/deployments/deployment-overview", - "docs/dbt-cloud/deployments/multi-tenant-deployment", - "docs/dbt-cloud/deployments/single-tenant-deployment", - "docs/dbt-cloud/deployments/deployment-architecture", - ], - }, - { - type: "category", - label: "dbt Cloud APIs", - items: [ - "docs/dbt-cloud/dbt-cloud-api/cloud-apis", - { - type: "category", - label: "Authentication", - items: [ - "docs/dbt-cloud/dbt-cloud-api/user-tokens", - "docs/dbt-cloud/dbt-cloud-api/service-tokens", - ], - }, - "docs/dbt-cloud/dbt-cloud-api/admin-cloud-api", - { - type: "category", - label: "Metadata API", - items: [ - "docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview", - "docs/dbt-cloud/dbt-cloud-api/metadata/metadata-querying", - { - type: "category", - label: "Schema", - items: [ - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-model", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-models", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metric", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metrics", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-source", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-sources", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-seed", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-seeds", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-snapshots", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-test", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-tests", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-exposure", - "docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-exposures", - ], - }, - ], - }, - ], - }, - "docs/dbt-cloud/cloud-dbt-cloud-support", - { - type: "category", - label: "dbt Cloud Release Notes", - link: { type: 'doc', id: 'docs/dbt-cloud/release-notes'}, - items: [ - { - type: "autogenerated", - dirName: "docs/dbt-cloud/release-notes", - }, ], }, ], @@ -311,6 +389,7 @@ const sidebarSettings = { label: "Project configs", items: [ "reference/dbt_project.yml", + "reference/dbtignore", "reference/project-configs/analysis-paths", "reference/project-configs/asset-paths", "reference/project-configs/clean-targets", @@ -342,10 +421,17 @@ const sidebarSettings = { "reference/resource-configs/bigquery-configs", "reference/resource-configs/redshift-configs", "reference/resource-configs/snowflake-configs", + "reference/resource-configs/singlestore-configs", "reference/resource-configs/spark-configs", "reference/resource-configs/materialize-configs", "reference/resource-configs/firebolt-configs", "reference/resource-configs/teradata-configs", + "reference/resource-configs/clickhouse-configs", + "reference/resource-configs/mindsdb-configs", + "reference/resource-configs/mssql-configs", + 
"reference/resource-configs/azuresynapse-configs", + "reference/resource-configs/greenplum-configs", + "reference/resource-configs/impala-configs", ], }, { @@ -360,7 +446,6 @@ const sidebarSettings = { "reference/resource-properties/columns", "reference/resource-properties/config", "reference/resource-properties/description", - "reference/resource-properties/docs", "reference/resource-properties/quote", "reference/resource-properties/tests", ], @@ -373,6 +458,8 @@ const sidebarSettings = { "reference/resource-configs/database", "reference/resource-configs/enabled", "reference/resource-configs/full_refresh", + "reference/resource-configs/grants", + "reference/resource-configs/docs", "reference/resource-configs/persist_docs", "reference/resource-configs/pre-hook-post-hook", "reference/resource-configs/schema", @@ -448,23 +535,19 @@ const sidebarSettings = { { type: "category", label: "For analyses", - items: [ - "reference/analysis-properties", - ], + items: ["reference/analysis-properties"], }, { type: "category", label: "For exposures", - items: [ - "reference/exposure-properties", - ], + items: ["reference/exposure-properties"], }, { type: "category", label: "For macros", items: [ "reference/macro-properties", - "reference/resource-properties/argument-type" + "reference/resource-properties/argument-type", ], }, ], @@ -524,51 +607,63 @@ const sidebarSettings = { label: "Jinja Reference", items: [ { - type: "category", - label: "dbt Jinja functions", - link: { - type: 'generated-index', - title: 'dbt Jinja functions', - description: 'In addition to the standard Jinja library, we\'ve added additional functions and variables to the Jinja context that are useful when working with a dbt project.', - slug: '/reference/dbt-jinja-functions', - }, - items: [ - { - type: "autogenerated", - dirName: "reference/dbt-jinja-functions", + type: "category", + label: "dbt Jinja functions", + link: { + type: "generated-index", + title: "dbt Jinja functions", + description: + "In addition to the standard Jinja library, we've added additional functions and variables to the Jinja context that are useful when working with a dbt project.", + slug: "/reference/dbt-jinja-functions", + }, + items: [ + { + type: "autogenerated", + dirName: "reference/dbt-jinja-functions", + }, + ], }, - ], - }, "reference/dbt-classes", ], }, { type: "category", - label: "Profiles (CLI only)", + label: "Setups (CLI only)", items: [ "reference/profiles.yml", - "reference/warehouse-profiles/bigquery-profile", - "reference/warehouse-profiles/postgres-profile", - "reference/warehouse-profiles/redshift-profile", - "reference/warehouse-profiles/snowflake-profile", - "reference/warehouse-profiles/mssql-profile", - "reference/warehouse-profiles/trino-profile", - "reference/warehouse-profiles/singlestore-profile", - "reference/warehouse-profiles/spark-profile", - "reference/warehouse-profiles/databricks-profile", - "reference/warehouse-profiles/exasol-profile", - "reference/warehouse-profiles/oracle-profile", - "reference/warehouse-profiles/azuresynapse-profile", - "reference/warehouse-profiles/dremio-profile", - "reference/warehouse-profiles/clickhouse-profile", - "reference/warehouse-profiles/materialize-profile", - "reference/warehouse-profiles/rockset-profile", - "reference/warehouse-profiles/firebolt-profile", - "reference/warehouse-profiles/teradata-profile", - "reference/warehouse-profiles/athena-profile", - "reference/warehouse-profiles/vertica-profile", - "reference/warehouse-profiles/tidb-profile", - 
"reference/warehouse-profiles/glue-profile", + "reference/warehouse-setups/bigquery-setup", + "reference/warehouse-setups/postgres-setup", + "reference/warehouse-setups/redshift-setup", + "reference/warehouse-setups/snowflake-setup", + "reference/warehouse-setups/mssql-setup", + "reference/warehouse-setups/trino-setup", + "reference/warehouse-setups/singlestore-setup", + "reference/warehouse-setups/spark-setup", + "reference/warehouse-setups/databricks-setup", + "reference/warehouse-setups/hive-setup", + "reference/warehouse-setups/exasol-setup", + "reference/warehouse-setups/oracle-setup", + "reference/warehouse-setups/azuresynapse-setup", + "reference/warehouse-setups/dremio-setup", + "reference/warehouse-setups/clickhouse-setup", + "reference/warehouse-setups/materialize-setup", + "reference/warehouse-setups/rockset-setup", + "reference/warehouse-setups/firebolt-setup", + "reference/warehouse-setups/teradata-setup", + "reference/warehouse-setups/athena-setup", + "reference/warehouse-setups/vertica-setup", + "reference/warehouse-setups/tidb-setup", + "reference/warehouse-setups/glue-setup", + "reference/warehouse-setups/mindsdb-setup", + "reference/warehouse-setups/greenplum-setup", + "reference/warehouse-setups/impala-setup", + "reference/warehouse-setups/layer-setup", + "reference/warehouse-setups/iomete-setup", + "reference/warehouse-setups/duckdb-setup", + "reference/warehouse-setups/sqlite-setup", + "reference/warehouse-setups/mysql-setup", + "reference/warehouse-setups/ibmdb2-setup", + "reference/warehouse-setups/alloydb-setup", ], }, { @@ -583,66 +678,238 @@ const sidebarSettings = { "reference/artifacts/other-artifacts", ], }, + , + { + type: "category", + label: "Database Permissions", + items: ["reference/snowflake-permissions"], + }, ], - tutorial: [ + guides: [ + { + type: "category", + label: "Best practices", + link: { + type: "generated-index", + title: "Best practice guides", + description: + "Learn how dbt Labs approaches building projects through our current viewpoints on structure, style, and setup.", + slug: "/guides/best-practices", + }, + items: [ + { + type: "category", + label: "How we structure our dbt projects", + link: { + type: "doc", + id: "guides/best-practices/how-we-structure/1-guide-overview", + }, + items: [ + "guides/best-practices/how-we-structure/2-staging", + "guides/best-practices/how-we-structure/3-intermediate", + "guides/best-practices/how-we-structure/4-marts", + "guides/best-practices/how-we-structure/5-the-rest-of-the-project", + ], + }, + ], + }, { type: "category", - label: "Getting Started with dbt Cloud", - link: { type: 'doc', id: 'tutorial/getting-started' }, + label: "Orchestration", + link: { + type: "generated-index", + title: "Orchestration guides", + description: + "Learn how to orchestrate your data transformations in dbt, using dbt Cloud, a variety of popular tools, or both working together.", + slug: "/guides/orchestration", + }, items: [ { type: "category", - label: "Getting set up", - link: { type: 'doc', id: 'tutorial/getting-set-up' }, + label: "Airflow and dbt Cloud", + link: { + type: "doc", + id: "guides/orchestration/airflow-and-dbt-cloud/1-airflow-and-dbt-cloud", + }, items: [ - "tutorial/getting-set-up/setting-up-bigquery", - "tutorial/getting-set-up/setting-up-databricks", - "tutorial/getting-set-up/setting-up-redshift", - "tutorial/getting-set-up/setting-up-snowflake", + "guides/orchestration/airflow-and-dbt-cloud/2-setting-up-airflow-and-dbt-cloud", + 
"guides/orchestration/airflow-and-dbt-cloud/3-running-airflow-and-dbt-cloud", + "guides/orchestration/airflow-and-dbt-cloud/4-airflow-and-dbt-cloud-faqs", ], }, { type: "category", - label: "Building your first project", - link: { type: 'doc', id: 'tutorial/building-your-first-project' }, + label: "Customizing CI/CD", + link: { + type: "doc", + id: "guides/orchestration/custom-cicd-pipelines/1-cicd-background", + }, items: [ - "tutorial/building-your-first-project/build-your-first-models", - "tutorial/building-your-first-project/test-and-document-your-project", - "tutorial/building-your-first-project/schedule-a-job", + "guides/orchestration/custom-cicd-pipelines/2-lint-on-push", + "guides/orchestration/custom-cicd-pipelines/3-dbt-cloud-job-on-merge", + "guides/orchestration/custom-cicd-pipelines/4-something-to-consider", ], }, ], }, { type: "category", - label: "Learning more", - link: { type: 'doc', id: 'tutorial/learning-more' }, + label: "Migration", items: [ - "tutorial/learning-more/getting-started-dbt-core", - "tutorial/learning-more/using-jinja", - "tutorial/learning-more/refactoring-legacy-sql", + { + type: "category", + label: "Versions", + link: { + type: "generated-index", + title: "Version migration guides", + description: + "Learn how to upgrade to the latest version of dbt Core.", + slug: "/guides/migration/versions", + }, + items: [ + { + type: "autogenerated", + dirName: "guides/migration/versions", + }, + ], + }, + { + type: "category", + label: "Tools", + link: { + type: "generated-index", + title: "Tool migration guides", + description: + "Learn how to migrate to dbt from other tools and platforms.", + slug: "/guides/migration/tools", + }, + items: [ + { + type: "category", + label: "Migrating from stored procedures", + link: { + type: "doc", + id: "guides/migration/tools/migrating-from-stored-procedures/1-migrating-from-stored-procedures", + }, + items: [ + "guides/migration/tools/migrating-from-stored-procedures/2-inserts", + "guides/migration/tools/migrating-from-stored-procedures/3-updates", + "guides/migration/tools/migrating-from-stored-procedures/4-deletes", + "guides/migration/tools/migrating-from-stored-procedures/5-merges", + "guides/migration/tools/migrating-from-stored-procedures/6-migrating-from-stored-procedures-conclusion", + ], + }, + "guides/migration/tools/migrating-from-spark-to-databricks", + ], + }, + ], + }, + { + type: "category", + label: "Advanced", + items: [ + { + type: "category", + label: "Adapter development", + items: [ + "guides/advanced/adapter-development/1-what-are-adapters", + "guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter", + "guides/advanced/adapter-development/3-building-a-new-adapter", + "guides/advanced/adapter-development/4-testing-a-new-adapter", + "guides/advanced/adapter-development/5-documenting-a-new-adapter", + "guides/advanced/adapter-development/6-promoting-a-new-adapter", + "guides/advanced/adapter-development/7-verifying-a-new-adapter", + ], + }, + ], + }, + { + type: "category", + label: "Legacy", + items: [ + "guides/legacy/debugging-errors", + "guides/legacy/debugging-schema-names", + "guides/legacy/getting-help", + "guides/legacy/best-practices", + "guides/legacy/writing-custom-generic-tests", + "guides/legacy/building-packages", + "guides/legacy/creating-new-materializations", + "guides/legacy/understanding-state", + "guides/legacy/videos", ], }, ], - "Glossary": [ + community: [ + { + type: "doc", + id: "community/join", + }, + { + type: "category", + label: "Contributing", + link: { + 
type: "doc", + id: "community/contribute", + }, + items: [ + { + type: "doc", + label: "Become a contributor", + id: "community/contribute", + }, + "community/contributing/contributing-writing", + "community/contributing/contributing-coding", + "community/contributing/contributing-online-community", + "community/contributing/contributing-realtime-events", + ], + }, + { + type: "link", + label: "Community Forum", + href: "/community/forum", + }, + { + type: "link", + label: "Events", + href: "/community/events", + }, + { + type: "category", + label: "Additional resources", + items: [ + "community/resources/viewpoint", + "community/resources/code-of-conduct", + "community/resources/slack-rules-of-the-road", + "community/resources/maintaining-a-channel", + "community/resources/vendor-guidelines", + "community/resources/forum-guidelines", + "community/resources/organizing-inclusive-events", + "community/resources/oss-expectations", + "community/resources/oss-projects", + "community/resources/contributor-license-agreements", + "community/resources/speaking-at-a-meetup", + ], + }, + ], + Glossary: [ { type: "category", label: "Analytics Engineering Glossary", link: { - type: 'generated-index', - title: 'Analytics Engineering Glossary', - description: 'The Analytics Engineering Glossary is a living collection of terms & concepts commonly used in the data industry. You can use and contribute to this resource to educate yourself, your team, and your stakeholders.', - slug: '/glossary', + type: "generated-index", + title: "Analytics Engineering Glossary", + description: + "The Analytics Engineering Glossary is a living collection of terms & concepts commonly used in the data industry. You can use and contribute to this resource to educate yourself, your team, and your stakeholders.", + slug: "/glossary", }, items: [ { - type: 'autogenerated', - dirName: 'terms' + type: "autogenerated", + dirName: "terms", }, ], }, ], }; - -module.exports = sidebarSettings +module.exports = sidebarSettings; diff --git a/website/snippets/available-beta-banner-metadata.md b/website/snippets/available-beta-banner-metadata.md new file mode 100644 index 00000000000..3ed785e95cb --- /dev/null +++ b/website/snippets/available-beta-banner-metadata.md @@ -0,0 +1,3 @@ +:::info Beta feature +This feature is currently in beta and subject to change. If you want to provide feedback, please [contact us](mailto:metadata@dbtlabs.com). +::: diff --git a/website/snippets/available-beta-banner.md b/website/snippets/available-beta-banner.md new file mode 100644 index 00000000000..15d365a84b1 --- /dev/null +++ b/website/snippets/available-beta-banner.md @@ -0,0 +1,3 @@ +:::info Beta feature +This feature is currently in beta and subject to change. If you are interested in getting access to the beta, please [contact us](mailto:support@getdbt.com). +::: diff --git a/website/snippets/available-enterprise-tier-only.md b/website/snippets/available-enterprise-tier-only.md new file mode 100644 index 00000000000..0d75b72287e --- /dev/null +++ b/website/snippets/available-enterprise-tier-only.md @@ -0,0 +1,7 @@ +:::caution Available for dbt Cloud Enterprise + +Connecting an Azure DevOps cloud account is available for organizations using the dbt Cloud Enterprise tier. + +Azure DevOps on-premise instances are not supported in dbt Cloud. 
+ +::: diff --git a/website/snippets/available-prerelease-banner.md b/website/snippets/available-prerelease-banner.md new file mode 100644 index 00000000000..3531a2f646f --- /dev/null +++ b/website/snippets/available-prerelease-banner.md @@ -0,0 +1,7 @@ +:::info Release candidate +dbt Core v1.2 is now available as a **release candidate**. + +For more information on prereleases, see ["About Core versions: Trying prereleases"](core-versions#trying-prereleases). + +Join the [#dbt-prereleases](https://getdbt.slack.com/archives/C016X6ABVUK) channel in the Community Slack so you can be the first to read about prereleases as soon as they're available! +::: diff --git a/website/snippets/core-versions-table.md b/website/snippets/core-versions-table.md index de60f13884e..7ecf61e5b96 100644 --- a/website/snippets/core-versions-table.md +++ b/website/snippets/core-versions-table.md @@ -1,9 +1,10 @@ | dbt Core | Initial Release | Active Support Until | Critical Support Until | dbt Cloud Until | Final Patch | |---------------------------------|-----------------|----------------------|-------------------------|-----------------|-------------| -| **v0.X** | (various dates) | v1.0.0 release | Dec 3, 2021 | Jun 30, 2022 ⚠️ | v0.21.1 | -| [**v1.0**](upgrading-to-v1.0) | Dec 3, 2021 | v1.1.0 release | Dec 3, 2022 | Dec 3, 2022 | | -| [**v1.1**](upgrading-to-v1.1) | Apr 28, 2022 | v1.2.0 release | Apr 28, 2023 | Apr 28, 2023 | | -| _**v1.2**_ | _Jul 2022_ | _v1.3.0 release_ | _Jul 2023_ | _Jul 2023_ | | -| _**v1.3**_ | _Oct 2022_ | _v1.4.0 release_ | _Oct 2023_ | _Oct 2023_ | | +| **v0.X** | (various dates) | v1.0.0 release | Dec 3, 2021 | See below ⚠️ | v0.21.1 | +| [**v1.0**](upgrading-to-v1.0) | Dec 3, 2021 | v1.1.0 release | Dec 3, 2022 | Dec 2022 | | +| [**v1.1**](upgrading-to-v1.1) | Apr 28, 2022 | v1.2.0 release | Apr 28, 2023 | Apr 2023 | | +| [**v1.2**](upgrading-to-v1.2) | Jul 26, 2022 | v1.3.0 release | Jul 26, 2023 | Jul 2023 | | +| [**v1.3**](upgrading-to-v1.3) | Oct 12, 2022 | v1.4.0 release | Oct 12, 2023 | Oct 2023 | | +| _**v1.4**_ | _Jan 2023_ | _v1.5.0 release_ | _Jan 2024_ | _Jan 2024_ | | _Italics: Future releases, NOT definite commitments. Shown for indication only._ diff --git a/website/snippets/discourse-help-feed-header.md b/website/snippets/discourse-help-feed-header.md new file mode 100644 index 00000000000..ef50864e7f1 --- /dev/null +++ b/website/snippets/discourse-help-feed-header.md @@ -0,0 +1 @@ +### Questions from the Community \ No newline at end of file diff --git a/website/snippets/grants-vs-access-to.md b/website/snippets/grants-vs-access-to.md new file mode 100644 index 00000000000..6304a0fea78 --- /dev/null +++ b/website/snippets/grants-vs-access-to.md @@ -0,0 +1,12 @@ + + +:::info Note +The `grants` config and the `grant_access_to` config are distinct. + +- **`grant_access_to`:** Enables you to set up authorized views. When configured, dbt provides an authorized view access to show partial information from other datasets, without providing end users with full access to those underlying datasets. For more information, see ["BigQuery configurations: Authorized views"](/reference/resource-configs/bigquery-configs#authorized-views) +- **`grants`:** Provides specific permissions to users, groups, or service accounts for managing access to datasets you're producing with dbt. 
For more information, see ["Resource configs: grants"](resource-configs/grants) + +You can use the two features together: "authorize" a view model with the `grant_access_to` configuration, and then add `grants` to that view model to share its query results (and _only_ its query results) with other users, groups, or service accounts. +::: + + diff --git a/website/snippets/hooks-to-grants.md b/website/snippets/hooks-to-grants.md new file mode 100644 index 00000000000..3bbdd04879f --- /dev/null +++ b/website/snippets/hooks-to-grants.md @@ -0,0 +1,5 @@ + + +In older versions of dbt, the most common use of `post-hook` was to execute `grant` statements that applied database permissions to models right after creating them. Starting in v1.2, we recommend using the [`grants` resource config](/reference/resource-configs/grants) instead, so that grants are applied automatically when your dbt model runs (a minimal sketch follows below). + + diff --git a/website/snippets/ide-ga-banner.md b/website/snippets/ide-ga-banner.md new file mode 100644 index 00000000000..2074e1bead6 --- /dev/null +++ b/website/snippets/ide-ga-banner.md @@ -0,0 +1,7 @@ +:::info 📌 + +The new and refreshed Cloud IDE is now GA. It includes performance upgrades, ergonomics improvements, and some delightful enhancements! + +Review the [new Cloud IDE features](/docs/get-started/dbt-cloud-features#ide-features) to learn more. + +::: diff --git a/website/snippets/sl-considerations-banner.md b/website/snippets/sl-considerations-banner.md new file mode 100644 index 00000000000..33cfb5edac5 --- /dev/null +++ b/website/snippets/sl-considerations-banner.md @@ -0,0 +1,8 @@ +:::caution Considerations + +Some important considerations to know about using the dbt Semantic Layer during the Public Preview: + +- Support for the Snowflake data platform only (_additional data platforms coming soon_) +- Support for the deployment environment only (_development experience coming soon_) + +::: diff --git a/website/snippets/sl-prerequisites.md b/website/snippets/sl-prerequisites.md new file mode 100644 index 00000000000..41076565ad3 --- /dev/null +++ b/website/snippets/sl-prerequisites.md @@ -0,0 +1,25 @@ + + +- Have a multi-tenant dbt Cloud account, hosted in North America. + * Team and Enterprise accounts will be able to set up the Semantic Layer and Metadata API in the integrated partner tool to import metric definitions. + * Developer accounts will be able to query the Proxy Server using SQL, but will not be able to browse pre-populated dbt metrics in external tools, which requires access to the Metadata API.
+- Have both your production and development environments running dbt version 1.3 or higher
+- Use the Snowflake data platform
+- Install the dbt metrics package version ">=1.3.0", "<1.4.0" in your dbt project (see the sketch just after this list)
+- Set up the Metadata API in the integrated tool to import metric definitions
+- Recommended: Review the dbt metrics page and the "Getting started with the dbt Semantic Layer" blog post
    + +
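The package pin called for in the prerequisites above lives in your project's `packages.yml`. A minimal sketch, assuming the metrics package is installed from the dbt package hub under the `dbt-labs/metrics` name:

```yaml
# packages.yml — pin the metrics package to the range the Semantic Layer expects
packages:
  - package: dbt-labs/metrics
    version: [">=1.3.0", "<1.4.0"]
```

Run `dbt deps` afterwards to pull the pinned version into your project.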
+ + + +- Have a multi-tenant dbt Cloud account, hosted in North America. + * Team and Enterprise accounts will be able to set up the Semantic Layer and Metadata API in the integrated partner tool to import metric definitions. + * Developer accounts will be able to query the Proxy Server using SQL, but will not be able to browse pre-populated dbt metrics in external tools, which requires access to the Metadata API.
+- Have both your production and development environments running dbt version 1.2 (latest)
+- Use the Snowflake data platform
+- Install the dbt metrics package version ">=1.3.0", "<1.4.0" in your dbt project
+- Set up the Metadata API in the integrated tool to import metric definitions
+- Recommended: Review the dbt metrics page and the "Getting started with the dbt Semantic Layer" blog post
    + +
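To make the `grants`-over-`post-hook` recommendation from the snippets above concrete, here is a minimal sketch of a model configured with `grants` in a properties file. The model name `customers` and the grantee names are hypothetical placeholders:

```yaml
# models/schema.yml — hypothetical model; `select` maps a privilege to a list of grantees
models:
  - name: customers
    config:
      grants:
        select: ['reporter', 'bi_user']
```

With this in place, dbt issues the corresponding `grant` statements automatically after building the model, so a `post-hook` is no longer needed for permissions.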
diff --git a/website/snippets/sl-public-preview-banner.md b/website/snippets/sl-public-preview-banner.md new file mode 100644 index 00000000000..6d7b197b88d --- /dev/null +++ b/website/snippets/sl-public-preview-banner.md @@ -0,0 +1,7 @@ +:::info 📌 + +The dbt Semantic Layer is currently available in Public Preview for multi-tenant dbt Cloud accounts hosted in North America. If you log in via https://cloud.getdbt.com/, you can access the Semantic Layer. If you log in with [another URL](/docs/deploy/regions), the dbt Semantic Layer will be available in the future. + +For more info, review the [Prerequisites](/docs/use-dbt-semantic-layer/dbt-semantic-layer#prerequisites), [Public Preview](/docs/use-dbt-semantic-layer/quickstart-semantic-layer#public-preview), and [Product architecture](/docs/use-dbt-semantic-layer/dbt-semantic-layer#product-architecture) sections. + +::: diff --git a/website/snippets/sl-set-up-steps.md b/website/snippets/sl-set-up-steps.md new file mode 100644 index 00000000000..3d2a531b773 --- /dev/null +++ b/website/snippets/sl-set-up-steps.md @@ -0,0 +1,30 @@ + +Before you continue with the following steps, you **must** have a multi-tenant dbt Cloud account hosted in North America. + * Team and Enterprise accounts can set up the Semantic Layer and [Metadata API](/docs/dbt-cloud-apis/metadata-api) in the integrated partner tool to import metric definitions. + * Developer accounts can query the Proxy Server using SQL but won't be able to browse dbt metrics in external tools, which requires access to the Metadata API. + +You can set up the dbt Semantic Layer in dbt Cloud at the environment level by following these steps: + +1. Log in to your dbt Cloud account. +2. Go to **Account Settings**, and then **Service Tokens** to create a new [service account API token](/docs/dbt-cloud-apis/service-tokens). Save your token somewhere safe. +3. Assign permissions to service account tokens depending on the integration tool you choose. You can review the [integration partner documentation](https://www.getdbt.com/product/semantic-layer-integrations) to determine the permission sets you need to assign. +4. Go to **Deploy** and then **Environment**, and select your **Deployment** environment. +5. Click **Settings** on the top right side of the page. +6. Click **Edit** on the top right side of the page. +7. Select dbt version 1.2 (latest) or higher. +8. Toggle the Semantic Layer **On**. +9. Copy the full proxy server URL (like `https://eagle-hqya7.proxy.cloud.getdbt.com`) to connect to your [integrated partner tool](https://www.getdbt.com/product/semantic-layer-integrations). +10. Use the URL in the data source configuration of the integrated partner tool. +11. Use the data platform login credentials that make sense for how the data is consumed. + +:::info 📌 + +Note: We do _not_ recommend using your dbt Cloud credentials here because of their elevated permissions. Instead, use your specific integration tool permissions. + +::: + +12. Set up the [Metadata API](/docs/dbt-cloud-apis/metadata-api) (Team and Enterprise accounts only) in the integrated partner tool to import the metric definitions. The [integrated partner tool](https://www.getdbt.com/product/semantic-layer-integrations) will treat the dbt Server as another data source (like a data platform). 
This requires: + +- The account ID, environment ID, and job ID (visible in the job URL) +- An [API service token](/docs/dbt-cloud-apis/service-tokens) with job admin and metadata permissions +- Adding the items above to the relevant fields in your integration tool diff --git a/website/snippets/slack-notifications-config-steps.md b/website/snippets/slack-notifications-config-steps.md new file mode 100644 index 00000000000..e643d4c5644 --- /dev/null +++ b/website/snippets/slack-notifications-config-steps.md @@ -0,0 +1,28 @@ +Setting up Slack notifications in dbt Cloud enables you to receive alerts in a chosen Slack channel when a job run succeeds, fails, or is cancelled. + +:::info Note +Currently, Slack notifications can only be configured by one user for one Slack channel. Additionally, you must be an admin of the Slack workspace in order to configure Slack notifications. + +If user roles have changed and you need to move ownership, reach out to support@getdbt.com and provide the support team with the information needed to make this change for you. +::: +### Set up the integration + +1. Click the gear in the top right and select **Profile**. +2. Click **Integrations** to the left. + +3. Click **Link your Slack profile**. + +4. Allow dbt Labs to access the Slack workspace. If you are a member of multiple workspaces, you can select the appropriate one from the dropdown menu in the top right corner. + + +### Configure the notifications + +1. Click the gear in the top right and select **Account Settings**. +2. Click **Slack Notifications** to the left and click **Edit** to the right. + +3. You can find the Slack notification settings at the bottom of the page. + +### Disable the Slack integration + +To disable the integration entirely, navigate back to the Integrations page and click **Disconnect Account** in the Slack pane. Confirm the disconnect, and the option will revert to its original state. + diff --git a/website/snippets/snowflake-acct-name.md b/website/snippets/snowflake-acct-name.md new file mode 100644 index 00000000000..7ab7bbd106f --- /dev/null +++ b/website/snippets/snowflake-acct-name.md @@ -0,0 +1,2 @@ +  ✅ `db5261993` or `db5261993.east-us-2.azure`
    ❌ `db5261993.eu-central-1.snowflakecomputing.com` diff --git a/website/snippets/tutorial-add-tests-to-models.md b/website/snippets/tutorial-add-tests-to-models.md index 4c5617a98c6..937a444a1f0 100644 --- a/website/snippets/tutorial-add-tests-to-models.md +++ b/website/snippets/tutorial-add-tests-to-models.md @@ -1,4 +1,4 @@ -Adding [tests](/docs/building-a-dbt-project/tests) to a project helps validate that your models are working correctly. +Adding [tests](/docs/build/tests) to a project helps validate that your models are working correctly. To add tests to your project: @@ -52,11 +52,10 @@ When you run `dbt test`, dbt iterates through your YAML files, and constructs a #### FAQs - - - - - - - - + + + + + + + diff --git a/website/snippets/tutorial-build-models-atop-other-models.md b/website/snippets/tutorial-build-models-atop-other-models.md index 18ace58b8fc..6ca4dd20ed8 100644 --- a/website/snippets/tutorial-build-models-atop-other-models.md +++ b/website/snippets/tutorial-build-models-atop-other-models.md @@ -11,7 +11,7 @@ Now you can experiment by separating the logic out into separate models and usin -
    +
    @@ -42,7 +42,7 @@ Now you can experiment by separating the logic out into separate models and usin
    -
    +
    @@ -73,7 +73,7 @@ Now you can experiment by separating the logic out into separate models and usin
    -
    +
    @@ -104,7 +104,7 @@ Now you can experiment by separating the logic out into separate models and usin
    -
    +
    @@ -195,8 +195,9 @@ Now you can experiment by separating the logic out into separate models and usin This time, when you performed a `dbt run`, separate views/tables were created for `stg_customers`, `stg_orders` and `customers`. dbt inferred the order to run these models. Because `customers` depends on `stg_customers` and `stg_orders`, dbt builds `customers` last. You do not need to explicitly define these dependencies. -### FAQs - - - +### FAQs {#faq-2} + + + + diff --git a/website/snippets/tutorial-change-way-model-materialized.md b/website/snippets/tutorial-change-way-model-materialized.md index 0412ba76dbb..395f3ea2e54 100644 --- a/website/snippets/tutorial-change-way-model-materialized.md +++ b/website/snippets/tutorial-change-way-model-materialized.md @@ -2,26 +2,36 @@ One of the most powerful features of dbt is that you can change the way a model By default, everything gets created as a view. You can override that by materializing everything in jaffle_shop as a table. Everything in the example project will still be materialized as a view. -1. Edit your `dbt_project.yml` file: +1. Edit your `dbt_project.yml` file. + - Update your project `name` to: + - + ```yaml + name: 'jaffle_shop' + ``` - ```yaml - models: - jaffle_shop: - +materialized: table - example: - +materialized: view - ``` + + - Update your `models` config block to: - + + + ```yaml + models: + jaffle_shop: + +materialized: table + example: + +materialized: view + ``` + + + - Click **Save**. 2. Enter the `dbt run` command. Your `customers` model should now be built as a table! :::info To do this, dbt had to first run a `drop view` statement (or API call on BigQuery), then a `create table as` statement. ::: -3. Edit `models/customers.sql` to override the `dbt_project.yml` for the `customers` model only by adding the following snippet to the top: +3. Edit `models/customers.sql` to override the `dbt_project.yml` for the `customers` model only by adding the following snippet to the top, and click **Save**: @@ -49,6 +59,6 @@ By default, everything gets created as a view. You can override that by material ### FAQs - - - + + + diff --git a/website/snippets/tutorial-create-new-dbt-cloud-account.md b/website/snippets/tutorial-create-new-dbt-cloud-account.md index 411a6fb0e48..bdde874d0c9 100644 --- a/website/snippets/tutorial-create-new-dbt-cloud-account.md +++ b/website/snippets/tutorial-create-new-dbt-cloud-account.md @@ -1,10 +1,10 @@ Let's start this section by creating a dbt Cloud account if you haven't already. 1. Navigate to [dbt Cloud](https://cloud.getdbt.com). -2. If you don't have a dbt account, create a new one and verify your account via email. -3. If you have a dbt Cloud account, you can create a new project from your existing account: - 1. Click ![hamburger icon](/img/hamburger-icon.png), then click **Account Settings**. - 2. Click **New project**, then click **Begin**. -4. You can now use the Set Up a New Project workflow to set up your first dbt Cloud project! +2. If you don't have a dbt Cloud account, create a new one, and verify your account via email. +3. If you already have a dbt Cloud account, you can create a new project from your existing account: + 1. Click the gear icon in the top-right, then click **Projects**. + 2. Click **+ New Project**. +4. You've arrived at the "Setup a New Project" page. 5. Type "Analytics" in the dbt Project Name field. You will be able to rename this project later. 6. Click **Continue**. 
\ No newline at end of file diff --git a/website/snippets/tutorial-delete-example-models.md b/website/snippets/tutorial-delete-example-models.md index bc613915aed..e290877437c 100644 --- a/website/snippets/tutorial-delete-example-models.md +++ b/website/snippets/tutorial-delete-example-models.md @@ -31,5 +31,5 @@ You can now delete the files that dbt created when you initialized the project: #### FAQs - - + + diff --git a/website/snippets/tutorial-document-your-models.md b/website/snippets/tutorial-document-your-models.md index 58c183b2355..dd9e1592145 100644 --- a/website/snippets/tutorial-document-your-models.md +++ b/website/snippets/tutorial-document-your-models.md @@ -1,4 +1,4 @@ -Adding [documentation](/docs/building-a-dbt-project/documentation) to your project allows you to describe your models in rich detail, and share that information with your team. Here, we're going to add some basic documentation to our project. +Adding [documentation](/docs/collaborate/documentation) to your project allows you to describe your models in rich detail, and share that information with your team. Here, we're going to add some basic documentation to our project. 1. Update your `models/schema.yml` file to include some descriptions, such as those below. @@ -45,4 +45,4 @@ Adding [documentation](/docs/building-a-dbt-project/documentation) to your proje -2. Run `dbt docs generate` to generate the documentation for your project. dbt introspects your project and your warehouse to generate a json file with rich documentation about your project. +2. Run `dbt docs generate` to generate the documentation for your project. dbt introspects your project and your warehouse to generate a file with rich documentation about your project. diff --git a/website/snippets/tutorial-initiate-project.md b/website/snippets/tutorial-initiate-project.md index 0fc1f84a9ec..8711e4d2747 100644 --- a/website/snippets/tutorial-initiate-project.md +++ b/website/snippets/tutorial-initiate-project.md @@ -1,6 +1,6 @@ -Now that you have a repository configured, you can initialize your project and start development in the dbt Cloud IDE: +Now that you have a repository configured, you can initialize your project and start development in dbt Cloud: -1. Click **Start Developing**. You can also navigate to the Develop page from ![hamburger menu](/img/hamburger-icon.png) in the top left. It might take a few minutes for your project to spin up for the first time, as it established your git connection, clones your repo, and tests the connection to the warehouse. +1. Click **Develop** from the upper left. It might take a few minutes for your project to spin up for the first time as it establishes your git connection, clones your repo, and tests the connection to the warehouse. 2. Above the file tree to the left, click **Initialize your project**. This builds out your folder structure with example models. 3. Make your initial commit by clicking **Commit**. Use the commit message `initial commit`. This creates the first commit to your managed repo and allows you to open a branch where you can add new dbt code. 4. Now you should be able to **directly query data from your warehouse** and **execute dbt run**. You can try this out now: @@ -8,7 +8,7 @@ Now that you have a repository configured, you can initialize your project and s -
    +
    ```sql select * from `dbt-tutorial.jaffle_shop.customers` @@ -16,7 +16,7 @@ select * from `dbt-tutorial.jaffle_shop.customers`
    -
    +
    ```sql select * from default.jaffle_shop_customers @@ -24,7 +24,7 @@ select * from default.jaffle_shop_customers
    -
    +
    ```sql select * from jaffle_shop_customers @@ -32,7 +32,7 @@ select * from jaffle_shop_customers
    -
    +
    ```sql select * from raw.jaffle_shop.customers diff --git a/website/snippets/tutorial-next-steps-tests.md b/website/snippets/tutorial-next-steps-tests.md index ee4277b694e..39764cede0a 100644 --- a/website/snippets/tutorial-next-steps-tests.md +++ b/website/snippets/tutorial-next-steps-tests.md @@ -2,4 +2,4 @@ Before moving on from testing, make a change and see how it affects your results * Write a test that fails, for example, omit one of the order statuses in the `accepted_values` list. What does a failing test look like? Can you debug the failure? * Run the tests for one model only. If you grouped your `stg_` models into a directory, try running the tests for all the models in that directory. -* Use a [docs block](/docs/building-a-dbt-project/documentation#using-docs-blocks) to add a Markdown description to a model. +* Use a [docs block](/docs/collaborate/documentation#using-docs-blocks) to add a Markdown description to a model. diff --git a/website/snippets/tutorial-sql-query.md b/website/snippets/tutorial-sql-query.md index 5d11b085ecf..b39373608e8 100644 --- a/website/snippets/tutorial-sql-query.md +++ b/website/snippets/tutorial-sql-query.md @@ -1,7 +1,7 @@ -
    +
    ```sql with customers as ( @@ -65,7 +65,7 @@ select * from final -
    +
    ```sql with customers as ( @@ -129,7 +129,7 @@ select * from final -
    +
    ```sql with customers as ( @@ -193,7 +193,7 @@ select * from final -
    +
    ```sql with customers as ( @@ -255,4 +255,4 @@ select * from final
    - \ No newline at end of file + diff --git a/website/snippets/what-is-dbt-intro.md b/website/snippets/what-is-dbt-intro.md new file mode 100644 index 00000000000..6826dafb852 --- /dev/null +++ b/website/snippets/what-is-dbt-intro.md @@ -0,0 +1 @@ +dbt is a transformation workflow that helps you get more work done while producing higher quality results. You can use dbt to modularize and centralize your analytics code, while also providing your data team with guardrails typically found in software engineering workflows. Collaborate on data models, version them, and test and document your queries before safely deploying them to production, with monitoring and visibility. diff --git a/website/src/components/blogPostCard/index.js b/website/src/components/blogPostCard/index.js new file mode 100644 index 00000000000..835ca57f3b8 --- /dev/null +++ b/website/src/components/blogPostCard/index.js @@ -0,0 +1,27 @@ +import React from 'react'; +import styles from './styles.module.css'; +import useBaseUrl from '@docusaurus/useBaseUrl'; +import Link from '@docusaurus/Link'; +import createPostPreview from '@site/functions/post-preview'; + + +function BlogPostCard({ postMetaData }) { + const { title, date, readingTime, description, link, image } = postMetaData + return ( +
    +
    + {image &&
    } +
    +

    {title}

    + {readingTime && {date} · {readingTime} minute read} +

    + {createPostPreview(description, 140)} +

    + Read more +
    +
    +
    + ); +} + +export default BlogPostCard; diff --git a/website/src/components/blogPostCard/styles.module.css b/website/src/components/blogPostCard/styles.module.css new file mode 100644 index 00000000000..f2a61f2ebb6 --- /dev/null +++ b/website/src/components/blogPostCard/styles.module.css @@ -0,0 +1,69 @@ +:root { + --border-radius: 10px; +} + +.card { + border: 1px solid #EFF2F3; + border-radius: var(--border-radius); + box-shadow: 0px 11px 24px rgba(138, 138, 138, .1); + padding: 2.5rem 2.5rem 1.5rem 2.5rem; +} + +/* if blog post has thumbnail image */ + +.imageCard { + border: 1px solid #EFF2F3; + border-radius: var(--border-radius); + box-shadow: 0px 11px 24px rgba(138, 138, 138, .1); + padding: 0; +} + +.contentContainer.imageContentContainer { + padding: 2.5rem 2.5rem 1.5rem 2.5rem; + display: block; +} + +.contentContainer { + height: 100%; + display: flex; + flex-direction: column; + flex-flow: wrap; +} + +.imageContentContainer { + width: 100%; + border-radius: var(--border-radius) var(--border-radius) 0px 0px; + min-height: 200px; +} + + +.cardWrapper, .cardWrapper article { + height: 100%; +} + +.cardWrapper a { + color: inherit; +} +.cardWrapper a:hover { + text-decoration: none; +} + +[data-theme='dark'] a.ctaLink { + color: #fff; + text-decoration: underline; +} + +a.ctaLink { + color: #009999; + font-weight: 600; + margin-top: auto; +} +a.ctaLink:after { + content: ' →'; + margin-left: 5px; +} + +.icon { + margin-bottom: 0.5rem; + max-width: 50px; +} diff --git a/website/src/components/callout/index.js b/website/src/components/callout/index.js new file mode 100644 index 00000000000..60546525225 --- /dev/null +++ b/website/src/components/callout/index.js @@ -0,0 +1,29 @@ +import React from 'react'; +import styles from './styles.module.css'; +import useBaseUrl from '@docusaurus/useBaseUrl'; +import Link from '@docusaurus/Link'; +import { useColorMode } from '@docusaurus/theme-common'; + + +function Callout({ heading, subheading, cta, link }) { + const { isDarkTheme } = useColorMode(); + return ( +
    +
    +
    +
    +

    {heading}

    +

    {subheading}

    + + {cta} + +
    +
    +
    +
    + ); +} + +export default Callout; + diff --git a/website/src/components/callout/styles.module.css b/website/src/components/callout/styles.module.css new file mode 100644 index 00000000000..45a0f9d903d --- /dev/null +++ b/website/src/components/callout/styles.module.css @@ -0,0 +1,39 @@ +.Callout { + background: #262A38; + padding-top:3rem; + padding-bottom:3rem; + text-align: center; + padding-top:6rem; + padding-bottom: 6rem; + margin-top:6rem; +} + +[data-theme='light'] .Callout, [data-theme='dark'] .button { + color: #fff; +} + +.button { + background-color: #047377; + border-color: #047377; + border-radius: var(--ifm-button-border-radius); + border-style: solid; + border-width: var(--ifm-button-border-width); + color: var(--ifm-button-color); + cursor: pointer; + display: inline-block; + font-size: calc(0.875rem * var(--ifm-button-size-multiplier)); + font-weight: var(--ifm-button-font-weight); + line-height: 1.5; + padding: 0.8rem 1.5rem; + text-align: center; + user-select: none; + vertical-align: middle; + white-space: nowrap; + transition-property: color, background, border-color; + transition-duration: var(--ifm-button-transition-duration); + transition-timing-function: var(--ifm-transition-timing-default); +} + +.button:hover { + color: #ffffff; +} diff --git a/website/src/components/card/index.js b/website/src/components/card/index.js new file mode 100644 index 00000000000..ad0cfa91177 --- /dev/null +++ b/website/src/components/card/index.js @@ -0,0 +1,39 @@ +import React from 'react'; +import styles from './styles.module.css'; +import useBaseUrl from '@docusaurus/useBaseUrl'; +import Link from '@docusaurus/Link'; +import { useColorMode } from '@docusaurus/theme-common'; + + +function Card({ title, body, link, icon }) { + const { isDarkTheme } = useColorMode(); + return ( +
    + {link ? +
    + {icon && } +

    {title}

    +

    + {body} +

    +
    + :
    + {icon && } +

    {title}

    +

    + {body} +

    +
    } +
    + ); +} + +export default Card; + diff --git a/website/src/components/card/styles.module.css b/website/src/components/card/styles.module.css new file mode 100644 index 00000000000..d61148079f6 --- /dev/null +++ b/website/src/components/card/styles.module.css @@ -0,0 +1,26 @@ +:root { + --border-radius: 10px; +} + +.card { + border: 1px solid #EFF2F3; + border-radius: var(--border-radius); + box-shadow: 0px 11px 24px rgba(138, 138, 138, .1); + padding: 2.5rem 2.5rem 1.5rem 2.5rem; +} + +.cardWrapper, .cardWrapper article { + height: 100%; +} + +.cardWrapper a { + color: inherit; +} +.cardWrapper a:hover { + text-decoration: none; +} + +.icon { + margin-bottom: 0.5rem; + max-width: 50px; +} diff --git a/website/src/components/collapsible/index.js b/website/src/components/collapsible/index.js index f3a7126ec04..e039433c542 100644 --- a/website/src/components/collapsible/index.js +++ b/website/src/components/collapsible/index.js @@ -2,7 +2,7 @@ import React, {useState} from 'react'; import styles from './styles.module.css'; -function Collapsible({children, header, expand}) { +function Collapsible({children, header, description, expand}) { const [expanded, setExpanded] = useState(expand); const toggleExpanded = function() { @@ -12,10 +12,10 @@ function Collapsible({children, header, expand}) { return ( <>

    - { expanded ? '➖' : '➕' } {header} + { expanded ? ' -' : ' +' }

    - { expanded ? children : Click to expand } + { expanded ? children : { description ? description : "Click to expand" } } ); } diff --git a/website/src/components/discourse/index.js b/website/src/components/discourse/index.js new file mode 100644 index 00000000000..fe6b0dc1130 --- /dev/null +++ b/website/src/components/discourse/index.js @@ -0,0 +1,217 @@ +import React, { useState, useEffect } from 'react' +import axios from 'axios' +import feedStyles from './styles.module.css'; + +// Bare component with no default props set +export const DiscourseFeed = ({ + order, + status, + after, + before, + inString, + min_posts, + max_posts, + min_views, + max_views, + tags, + term, + category, + title, + link_text, + link_href, + show_cta, + topic_count = 5, + styles = {} +}) => { + + const [topics, setTopics] = useState([]) + const [loading, setLoading] = useState(true) + const [isError, setIsError] = useState(false) + + useEffect(() => { + let isMounted = true + + // Get topics from Discourse API + const fetchData = async () => { + try { + // Ensure error state is false and loading true + setLoading(true) + setIsError(false) + + // Build Netlify Function endpoint + const endpoint = window?.location?.hostname?.includes('localhost') + ? 'http://localhost:8888/.netlify/functions/get-discourse-topics' + : '/.netlify/functions/get-discourse-topics' + + // If 'after' prop not passed in, set relative after date + let afterDate = after + if(!afterDate) { + // Today's date + let today = new Date(); + if(category === 'help') { + const relativeDate = new Date(today.setDate(today.getDate() - 30)); + afterDate = formatDate(relativeDate) + } else { + const relativeDate = new Date(today.setDate(today.getDate() - 90)); + afterDate = formatDate(relativeDate) + } + } + + // Get Discourse topics data + const { data } = await axios.post(endpoint, { + status, + order, + after: afterDate, + before, + inString, + min_posts, + max_posts, + min_views, + max_views, + tags, + term, + category, + }) + + // Set error state if data not available + if(!data) throw new Error('Unable to get latest topics.') + + // Set topics count + if(isMounted) { + setTopics(data.slice(0, topic_count)) + setLoading(false) + } + } catch(err) { + setIsError(true) + setLoading(false) + } + } + fetchData() + + return () => { + isMounted = false + } + }, []) + + // Set initial min-height + // This is to avoid layout shifts + // which affects Lighthouse performance scores + const setMinHeight = isError + ? 'auto' + : 414 + + return ( +
    + {title && ( +

    {title}

    + )} + {loading ? ( + Loading + ) : isError || !topics?.length > 0 ? ( +

    No recent forum posts for this topic. Ask a question!

    + ) : ( +
      + {topics.map(topic => ( +
    • + {topic?.has_accepted_answer && ( + + )} + {topic.title} + {(topic?.author || topic?.posts_count || topic?.like_count) && ( + <> + {' '} + + {topic?.author && `by ${topic.author}${topic?.posts_count ? ',' : ''}`} + {' '} + {topic?.posts_count && `${topic.posts_count - 1} ${(topic.posts_count - 1) === 1 ? 'reply' : 'replies'}${topic?.like_count ? ',' : ''}`} + {' '} + {topic?.like_count ? `${topic.like_count} ${(topic.like_count) === 1 ? 'like' : 'likes'}` : ''} + + + )} + {(topic?.blurb) && ( + <> + {' '} +
      + {topic.blurb} +
      + + )} +
    • + ))} +
    + )} + {show_cta && ( + {link_text} + )} +
    + ) +} + +// A more specific version of DiscourseFeed +// with default props set. Same props can be used +export const DiscourseHelpFeed = ({ + order = 'latest_topic', + status = 'solved', + category = 'help', + tags, + show_cta = true, + link_text = 'Ask the Community', + link_href = `https://discourse.getdbt.com/new-topic${category ? `?category=${category}` : ''}${tags ? (!category ? `?tags=${tags}` : `&tags=${tags}`) : ''}`, + after = '2000-01-01', + before, + inString, + min_posts, + max_posts, + min_views, + max_views, + term, + title, + topic_count = 3, + styles = {} +}) => { + return +} + +// Helpers +function TopicWrapper({ topic, children }) { + if(topic?.slug && topic?.id) { + return ( + {children} + ) + } else { + return ( +
    {children}
    + ) + } +} + +// Format date by YYYY-MM-DD +function formatDate(date) { + return `${date.getFullYear()}-${('0'+ (date.getMonth()+1)).slice(-2)}-${('0'+ date.getDate()).slice(-2)}` +} + diff --git a/website/src/components/discourse/index.test.js b/website/src/components/discourse/index.test.js new file mode 100644 index 00000000000..c0d40e26b8f --- /dev/null +++ b/website/src/components/discourse/index.test.js @@ -0,0 +1,93 @@ +import React from 'react' +import axios from 'axios' +import { render, screen } from '@testing-library/react' +import { DiscourseFeed } from './index' + +// Mock api data +const mockAxiosResponse = { + data: [ + { + "id": 4911, + "title": "Processing source tables with differing arrival times throughout day", + "has_accepted_answer": true, + "author": "MatthewMunn", + "like_count": 1, + "posts_count": 2 + } + ], +} + +describe("Test DiscourseFeed component", () => { + // returns mocks to original state (ex: window = undefined) + afterEach(() => jest.restoreAllMocks()) + + it('Should render same text passed into title prop', () => { + render() + const testElement = screen.getByText(/Open topics/i) + expect(testElement).toBeInTheDocument() + }) + + it('Should display loading icon on inital load', () => { + render() + const img = screen.getByTestId('feed-loader') + expect(img).toBeInTheDocument() + }) + + it('Should display Discourse data after API fetch', async () => { + // Get mock api response + jest.spyOn(axios, 'post').mockResolvedValueOnce(mockAxiosResponse) + + render() + // Topic "has_accepted_answer" - should display ✅ ahead of title + const topicCheckMark = await screen.findByText(/✅/i) + expect(topicCheckMark).toBeInTheDocument() + + // Topic title should exist in document + const topicTitle = await screen.findByText(/Processing source tables/i) + expect(topicTitle).toBeInTheDocument() + + // Author should display + const topicAuthor = await screen.findByText(/MatthewMunn/i) + expect(topicAuthor).toBeInTheDocument() + + // Should display reply + const topicReply = await screen.findByText(/1 reply/i) + expect(topicReply).toBeInTheDocument() + + // Has 1 like, should display '1 like' and not '1 likes' + const topicLikes = await screen.findByText(/1 like/i) + expect(topicLikes).toBeInTheDocument() + }) + + it('Should show cta with correct text and href', async () => { + // Get mock api response + jest.spyOn(axios, 'post').mockResolvedValueOnce(mockAxiosResponse) + + render() + + const button = await screen.findByTestId('feed-cta') + const buttonText = await screen.findByText(/See open topics/i) + const buttonHref = buttonText.closest('a') + expect(button).toBeInTheDocument() + expect(buttonText).toBeInTheDocument() + expect(buttonHref).toHaveAttribute('href', 'https://discourse.getdbt.com/c/help/19') + }) + + it('Should display message when no topics found', async () => { + // Get mock api response + const mockEmptyResponse = { + data: [], + } + jest.spyOn(axios, 'post').mockResolvedValueOnce(mockEmptyResponse) + + render() + + const errorText = await screen.findByTestId('error-text') + expect(errorText).toBeInTheDocument() + }) +}) + diff --git a/website/src/components/discourse/styles.module.css b/website/src/components/discourse/styles.module.css new file mode 100644 index 00000000000..5aa0c79e924 --- /dev/null +++ b/website/src/components/discourse/styles.module.css @@ -0,0 +1,69 @@ +/* Topics Section */ +.discourseTopics { + margin: 2rem 0 3rem; + display: flex; + flex-direction: column; + align-items: flex-start; + background: 
var(--ifm-menu-color-background-active); + padding: 20px; + border-radius: 10px; +} +[data-theme="dark"] .discourseTopics { + background: #333B47; +} +.discourseTopics:last-of-type { + margin-bottom: 0; +} +.discourseTopics .solvedTopic { + font-size: .8rem; +} +.discourseTopics .solvedTopic:hover { + cursor: default; +} +.discourseTopics ul { + list-style: none; + margin: 0; + padding: 0; + width: 100%; +} +.discourseTopics blockquote { + border-left: 1px solid var(--ifm-toc-border-color); +} +.discourseTopics ul li { + padding: 1rem 0; + border-bottom: 1px solid var(--ifm-toc-border-color); +} +.discourseTopics ul li:first-of-type { + padding-top: 0; +} +.discourseTopics ul li:last-of-type { + border-bottom: none; +} +.discourseTopics ul li a { + font-weight: 600; + font-size: 18px; +} +[data-theme="dark"] .discourseTopics ul li a { + color: #91D0CB; +} +.discourseTopics blockquote { + margin-bottom: 0; +} + +/* Cta */ +.discourseCta { + font-size: 1.1rem; + margin-top: auto; + color: #fff; +} +.discourseCta:hover { + background: var(--color-light-teal); + color: #fff; +} + +/* Loading icon */ +.discourseTopics .loadingIcon { + max-width: 100px; + display: block; +} + diff --git a/website/src/components/docSearchWeight/index.js b/website/src/components/docSearchWeight/index.js new file mode 100644 index 00000000000..ab024f9b00e --- /dev/null +++ b/website/src/components/docSearchWeight/index.js @@ -0,0 +1,31 @@ +import React from 'react'; +import styles from './styles.module.css'; + +function DocSearchWeight({ weight }) { + + let searchWeight + + let allowedValues = ['light', 'medium', 'heavy'] + + // Intercept the value from frontmatter and set it to the correct value + if (!weight) { + searchWeight = 0 + } else if (weight == "light") { + searchWeight = 25 + } else if (weight == "medium") { + searchWeight = 50 + } else if (weight == "heavy") { + searchWeight = 100 + } + + // Throw an error if the value is not allowed + if (weight && !allowedValues.includes(weight)) { + throw `Invalid value for search_weight: ${weight}. Allowed values are: ${allowedValues.join(', ')}.` + } + + return ( +
    {searchWeight}
    + ); +} + +export default DocSearchWeight; diff --git a/website/src/components/docSearchWeight/styles.module.css b/website/src/components/docSearchWeight/styles.module.css new file mode 100644 index 00000000000..ff5589d6627 --- /dev/null +++ b/website/src/components/docSearchWeight/styles.module.css @@ -0,0 +1,3 @@ +.customSearchWeight { + display: none; +} diff --git a/website/src/components/events/index.js b/website/src/components/events/index.js new file mode 100644 index 00000000000..a2c0df8eba5 --- /dev/null +++ b/website/src/components/events/index.js @@ -0,0 +1,16 @@ +import React, { useEffect } from 'react' +import feedStyles from './styles.module.css'; + +export default function EventsFeed(styles = {}) { + useEffect(() => { + const script = document.createElement('script'); + script.src = 'https://cdn.addevent.com/libs/cal/js/cal.events.embed.t3.init.js'; + script.async = true; + document.body.appendChild(script); + }, []) + return ( +
    +
    +
    + ) +} diff --git a/website/src/components/events/styles.module.css b/website/src/components/events/styles.module.css new file mode 100644 index 00000000000..d5b3e877c45 --- /dev/null +++ b/website/src/components/events/styles.module.css @@ -0,0 +1,5 @@ +.aeEmbed { + width: 100%; + height: 500px; + margin: 0 auto 20px; +} diff --git a/website/src/components/faqList/index.js b/website/src/components/faqList/index.js index b04d22d73b2..e29cbfddd54 100644 --- a/website/src/components/faqList/index.js +++ b/website/src/components/faqList/index.js @@ -2,13 +2,13 @@ import React from 'react'; import styles from './styles.module.css'; const files = require.context( - '../../../docs/faqs', - true, - /\.md$/ + '../../../docs/faqs', + true, + /\.md$/ ); var faqs = []; -files.keys().forEach(function(key, i) { +files.keys().forEach(function (key, i) { if (key.endsWith('index.md')) { return; } @@ -18,22 +18,22 @@ files.keys().forEach(function(key, i) { var el = faq.default({}); faqs.push( -
    -

    - # - {i+1}. {meta.title} -

    -
    - {el} -
    +
    +

    + # + {i + 1}. {meta.title} +

    +
    + {el} +
    ); }); -function FAQList({children, style}) { +function FAQList({ children, style }) { return (
    - {faqs} + {faqs}
    ); } diff --git a/website/src/components/faqs/index.js b/website/src/components/faqs/index.js index 221c2ec3c09..4f07d9127f4 100644 --- a/website/src/components/faqs/index.js +++ b/website/src/components/faqs/index.js @@ -1,8 +1,8 @@ -import React, {useState, useEffect} from 'react'; +import React, { useState, useEffect } from 'react'; import styles from './styles.module.css'; -import {usePluginData} from '@docusaurus/useGlobalData'; +import { usePluginData } from '@docusaurus/useGlobalData'; -function FAQ({children, src, alt_header=null}) { +function FAQ({ children, src, alt_header = null }) { const [isOn, setOn] = useState(false); const [filePath, setFilePath] = useState(src) @@ -10,15 +10,15 @@ function FAQ({children, src, alt_header=null}) { // Get all faq file paths from plugin const { faqFiles } = usePluginData('docusaurus-build-global-data-plugin'); - + useEffect(() => { // Search for faq where frontmatter ID matches src prop const faqFile = faqFiles.find(file => file.id === src) // If faqFile found with ID, set filePath for this file - if(faqFile?.id) { - const data = faqFile.filePath.match(/(docs\/faqs\/(.*)\.md$)/g) - if(data?.length) { + if (faqFile?.id) { + const data = faqFile.filePath.match(/(docs\/docs\/faqs\/(.*)\.md$)/g) + if (data?.length) { setFilePath(data[1]) } } @@ -27,34 +27,34 @@ function FAQ({children, src, alt_header=null}) { useEffect(() => { try { const file = require(`../../../docs/faqs/${filePath}.md`) - if(file) { + if (file) { const meta = file.metadata; const contents = file.default({}); - setFileContent({meta, contents}) + setFileContent({ meta, contents }) } - } catch(err) { + } catch (err) { return null } }, [filePath]) - - const toggleOn = function() { + + const toggleOn = function () { setOn(!isOn); } return ( -
    - - -   - { alt_header || fileContent?.meta && fileContent.meta.title } - -
    - { fileContent?.contents && fileContent.contents } -
    +
    + + +   + {alt_header || fileContent?.meta && fileContent.meta.title} + +
    + {fileContent?.contents && fileContent.contents}
    +
    ); } diff --git a/website/src/components/faqs/styles.module.css b/website/src/components/faqs/styles.module.css index d37b292825b..baeecd2c2f5 100644 --- a/website/src/components/faqs/styles.module.css +++ b/website/src/components/faqs/styles.module.css @@ -10,7 +10,7 @@ :local(.toggle) { background-image: var(--ifm-menu-link-sublist-icon); - background-size: 2rem 2rem; + background-size: 1.25rem 1.25rem; background-position: center; content: ' '; display: inline-block; @@ -19,11 +19,17 @@ vertical-align: middle; } +/* hack for unswizzled FAQ arrows */ +:local(html[data-theme='dark'] .toggle) { + filter: invert(1); +} + :local(.body) { margin-left: 2em; margin-bottom: 10px; padding: 20px; background-color: #e3f8f8; + color: var(--ifm-color-gray-900); } :local(.body > p:last-child) { diff --git a/website/src/components/file/styles.module.css b/website/src/components/file/styles.module.css index dc9cf008306..a461116cad6 100644 --- a/website/src/components/file/styles.module.css +++ b/website/src/components/file/styles.module.css @@ -18,6 +18,12 @@ font-size: 12px; } +:local(html[data-theme='dark'] .title) { + background-color: var(--ifm-menu-color-background-active); + border: 1px solid var(--ifm-menu-color-background-active); + color: white; +} + :local(.titleInner) { margin: 4px 8px; } diff --git a/website/src/components/hero/index.js b/website/src/components/hero/index.js new file mode 100644 index 00000000000..b6dbe7e245b --- /dev/null +++ b/website/src/components/hero/index.js @@ -0,0 +1,24 @@ +import React from 'react'; +import styles from './styles.module.css'; +import { useColorMode } from '@docusaurus/theme-common'; + + +function Hero({ heading, subheading, showGraphic = false }) { + const { isDarkTheme } = useColorMode(); + return ( +
    +
    +
    +
    +
    +

    {heading}

    +

    {subheading}

    +
    +
    +
    +
    + ); +} + +export default Hero; + diff --git a/website/src/components/hero/styles.module.css b/website/src/components/hero/styles.module.css new file mode 100644 index 00000000000..efacee33f6e --- /dev/null +++ b/website/src/components/hero/styles.module.css @@ -0,0 +1,51 @@ +.Hero { + background: #262A38; + padding-top:3rem; + padding-bottom:3rem; + text-align: center; + display: flex; + justify-content: center; + align-items: center; + /* position: relative; */ + min-height: 375px; + margin-bottom: 0; +} + +[data-theme='light'] .Hero { + color: #fff; +} + +@media (min-width: 996px) { + .Hero { + margin-bottom: 6rem; + } +} + +.showGraphic { + display: none; +} + +@media (min-width: 996px) { + .showGraphic{ + position: absolute; + left: -50px; + top:150px; + content:''; + display: block; + width: 25vw; + max-width: 315px; + height: 428px; + background-size: contain; + background-image: url('/img/hero-graphic-2x.png'); + background-repeat: no-repeat; + } +} + +.w60 { + width: 100%; +} +@media (min-width: 996px) { + .w60 { + width: 60%; + } +} diff --git a/website/src/components/lightbox/index.js b/website/src/components/lightbox/index.js index 27b94212010..d312e67ca8f 100644 --- a/website/src/components/lightbox/index.js +++ b/website/src/components/lightbox/index.js @@ -28,7 +28,7 @@ function Lightbox({children, src, title, collapsed}) { /> -

-        { title }
+        { title }

    ); diff --git a/website/src/components/lightbox/styles.module.css b/website/src/components/lightbox/styles.module.css index 04d1f228254..4c33e5173e5 100644 --- a/website/src/components/lightbox/styles.module.css +++ b/website/src/components/lightbox/styles.module.css @@ -1,19 +1,18 @@ :local(.title) { text-align: center; + font-size: small; width: 100%; } - :local(.docImage) { - filter: drop-shadow(1px 4px 10px #aaaaaa33); + filter: drop-shadow(4px 4px 6px #aaaaaa33); margin: 10px auto; display: block; - width: 600px; - max-width: 100%; + max-width: 400px; } :local(.collapsed) { - max-width: 300px !important; + max-width: 400px !important; display: inline-block; } diff --git a/website/src/components/postCarousel/index.js b/website/src/components/postCarousel/index.js new file mode 100644 index 00000000000..0b93c2d29e5 --- /dev/null +++ b/website/src/components/postCarousel/index.js @@ -0,0 +1,37 @@ +import React from 'react'; +import BlogPostCard from '@site/src/components/blogPostCard'; +import { Swiper, SwiperSlide } from 'swiper/react'; +import { Navigation } from 'swiper'; +import 'swiper/css'; +import 'swiper/css/navigation'; + + +function PostCarousel({ blogPostData }) { + return ( +
    + + + {blogPostData.map((item) => )} + + +
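/* PostCarousel wraps swiper/react: the imports suggest each entry of
   `blogPostData` renders as a BlogPostCard inside a SwiperSlide, with the
   Navigation module supplying the prev/next arrows styled in custom.css.
   The expected item shape, inferred from the homepage's `recentBlogData`
   mapping in pages/index.js:

     const blogPostData = [{
       title: 'Example post',       // metadata.title
       date: 'May 1, 2022',         // metadata.formattedDate
       readingTime: 5,              // reading time, rounded to whole minutes
       description: '...',          // metadata.description
       link: '/blog/example-post',  // metadata.permalink
     }];
     <PostCarousel blogPostData={blogPostData} />
*/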
    + ); +} + +export default PostCarousel; + diff --git a/website/src/components/snippet/index.js b/website/src/components/snippet/index.js index e92c2aed901..d16381ac913 100644 --- a/website/src/components/snippet/index.js +++ b/website/src/components/snippet/index.js @@ -1,6 +1,5 @@ import React from 'react' import styles from './styles.module.css'; -import WHCore from '@site/src/components/whcode'; {/* The Snippet component works in a similar way as FAQs. @@ -12,7 +11,7 @@ export default function Snippet({ src }) { const contents = file.default({}); return (
    - { contents } + { contents }
    ) } diff --git a/website/src/components/whcode/index.js b/website/src/components/whcode/index.js index 1ffcae0bd4b..cf581442208 100644 --- a/website/src/components/whcode/index.js +++ b/website/src/components/whcode/index.js @@ -4,25 +4,33 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; function WHCode({children}) { - if (children.length != 4) { - throw "Expected four elements as children of WHCode element, got " + children.length; - } + + let tabValuesArr = [] + let value + + const tabItems = children.map((child, index) => { + if (child.props.warehouse == undefined || child.props.warehouse == '') { + throw "Expected warehouse prop to be defined for each child of WHCode component"; + } + + tabValuesArr.push({ label: child.props.warehouse, value: child.props.warehouse }) + + value = child.props.warehouse + + return ( + + {child} + + ); + }) + return ( - {children[0]} - {children[1]} - {children[2]} - {children[3]} + defaultValue={tabValuesArr[0].value} + values={tabValuesArr}> + {tabItems} ); } -export default WHCode; \ No newline at end of file +export default WHCode; diff --git a/website/src/components/wistia/index.js b/website/src/components/wistia/index.js index fdcfad99b62..1230b119f7f 100644 --- a/website/src/components/wistia/index.js +++ b/website/src/components/wistia/index.js @@ -1,26 +1,30 @@ import React from 'react'; - - function WistiaVideo({id, paddingTweak = "56.25%"}) { return ( - - -
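/* The WHCode refactor above drops the hard-coded four-children contract in
   favor of a required `warehouse` prop on each child, so any number of tabs
   works. A usage sketch (warehouse names assumed):

     <WHCode>
       <div warehouse="BigQuery">bigquery sample</div>
       <div warehouse="Snowflake">snowflake sample</div>
     </WHCode>

   Each child's `warehouse` value doubles as the tab label and tab value, and
   the first child's value becomes the default tab. */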
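/* WistiaVideo appears to follow Wistia's responsive-embed pattern: a wrapper
   whose padding-top (the `paddingTweak` prop, "56.25%" by default, i.e. 16:9)
   reserves the aspect ratio for the embedded iframe. Usage stays a one-liner
   (id value assumed):

     <WistiaVideo id="abc123xyz" />
     <WistiaVideo id="abc123xyz" paddingTweak="62.5%" />
*/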
    ); } diff --git a/website/src/css/custom.css b/website/src/css/custom.css index 8742e426e1a..d9eddd9de3b 100644 --- a/website/src/css/custom.css +++ b/website/src/css/custom.css @@ -7,6 +7,8 @@ --ifm-color-info-light: #d9f6f4 !important; --ifm-color-warning-light: #ffe1be !important; --ifm-color-danger-light: #f9d3d4 !important; + --ifm-color-gray-900: #262a38; + --ifm-font-weight-narrow: 400; --ifm-font-weight-semibold: 600; --ifm-font-weight-bold: 700; --ifm-h1-font-size: 2.5rem; @@ -14,33 +16,177 @@ --ifm-h3-font-size: 1.563rem; --ifm-h4-font-size: 1.063rem; --ifm-h5-font-size: 0.938rem; - --ifm-navbar-height: 94px; + --ifm-navbar-height: 85px; + --ifm-navbar-link-hover-color: var(--color-orange); + --ifm-navbar-link-active-color: var(--color-orange); + --ifm-footer-padding-vertical: calc(var(--ifm-spacing-vertical) * 1.875); --ifm-footer-background-color: #4f5d75; - --ifm-footer-color: #bfc0c0; + --ifm-footer-color: var(--color-white); --ifm-link-color: #009999; --ifm-color-primary: #047377; --ifm-link-hover-color: #047377; - --ifm-button-border-radius: 4px; + --ifm-button-border-radius: 5px; --ifm-menu-color: #262a38; --ifm-menu-color-active: #009999; --ifm-pagination-nav-color-hover: #047377; --ifm-tabs-color-active: #047377; + --ifm-menu-color-background-active: #effbfa; --ifm-font-family-monospace: "Source Code Pro", SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; --ifm-font-family-base: "Source Sans Pro", sans-serif; --ifm-heading-font-family: "Source Sans Pro", sans-serif; - --ifm-code-padding-vertical: .2rem; - --ifm-code-padding-horizontal: .4rem; + --ifm-code-padding-vertical: 0.2rem; + --ifm-code-padding-horizontal: 0.4rem; --ifm-code-background: var(--ifm-color-emphasis-200); --ifm-navbar-item-padding-horizontal: 1rem; --ifm-alert-color: var(--ifm-font-color-base-inverse); + --ifm-color-info-contrast-foreground: var(--ifm-color-gray-900); + --ifm-alert-foreground-color: --ifm-color-gray-900; + --ifm-toc-padding-vertical: 1rem; --color-primary-blue: #262a38; --color-green-blue: #047377; --color-light-teal: #009999; --color-light-blue: #96d7d1; --color-orange: #ff694a; --color-off-white: #eff2f3; + --color-white: white; --color-nav-text: #bfc0c0; + --color-nav-sub-level-text: #545a66; + --color-footer-accent: #979eab; + --docsearch-text-color: #262a38; + --pagination-icon-next: "\2192"; + --pagination-icon-prev: "\2190"; + --filter-brightness-low: 1.1; + --filter-brightness-high: 1.5; + + --light-dark-toggle: "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+PHBhdGggZD0iTTQuMzA4IDMuMzg1YzAtMS4xNzguMTczLTIuMzcuNjE1LTMuMzg1QzEuOTgzIDEuMjggMCA0LjI4MiAwIDcuNjkyQTguMzA4IDguMzA4IDAgMCAwIDguMzA4IDE2YzMuNDEgMCA2LjQxMi0xLjk4MyA3LjY5Mi00LjkyMy0xLjAxNS40NDItMi4yMDcuNjE1LTMuMzg1LjYxNWE4LjMwOCA4LjMwOCAwIDAgMS04LjMwNy04LjMwN1oiIGZpbGw9IiM5MkEwQjMiLz48L3N2Zz4="; + + /* search overrides */ + --docsearch-searchbox-background: var(--color-white) !important; + --docsearch-searchbox-focus-background: var(--color-white) !important; + --docsearch-searchbox-shadow: inset 0 0 0 0 var(--docsearch-primary-color) !important; + + --docsearch-modal-background: var(--color-white) !important; + --docsearch-modal-shadow: none; + --docsearch-muted-color-for-keys: #eff2f3; + + --docsearch-highlight-color: var(--docsearch-muted-color) !important; + --docsearch-muted-color: #545a66; + --docsearch-container-background: rgba(255, 255, 255, 1) !important; + + --docsearch-key-gradient: transparent 
!important; + --docsearch-key-shadow: inset 0 0 0 0 #cdcde6, inset 0 0 0 0 #fff, + 0 0 0 0 rgba(30, 35, 90, 0) !important; + --docsearch-key-shadow: none !important; +} + +/* dark theme styles!!! */ +html[data-theme="dark"] { + --ifm-navbar-background-color: var(--color-primary-blue); + --ifm-menu-color-background-active: var(--color-primary-blue); + --ifm-menu-color: var(--color-white); + + /* search button darkmode */ + --docsearch-muted-color: var(--color-off-white); + --docsearch-muted-color-for-keys: var(--docsearch-muted-color); + --docsearch-searchbox-background: var(--color-primary-blue) !important; + + /* search popup */ + --docsearch-modal-background: var(--color-primary-blue) !important; + --docsearch-footer-background: var(--color-primary-blue) !important; + --docsearch-searchbox-focus-background: var(--color-primary-blue) !important; + --docsearch-highlight-color: var(--docsearch-muted-color) !important; + + /* admonition icon */ + --ifm-color-info-contrast-foreground: var(--ifm-color-gray-900); + + --ifm-table-cell-color: var(--color-green-blue); +} + +/* For /dbt-cloud/api REDOC Page */ +html[data-theme="dark"] .api-content h2, +html[data-theme="dark"] .api-content h3, +html[data-theme="dark"] .api-content h4, +html[data-theme="dark"] .api-content h5, +html[data-theme="dark"] .api-content h5 span, +html[data-theme="dark"] .api-content h1 { + color: white; +} + +html[data-theme="dark"] .api-content button, +html[data-theme="dark"] .api-content a { + filter: brightness(1.25); +} + +html[data-theme="dark"] .api-content a:hover { + filter: brightness(1.25); +} + +.redoc-wrap .api-content a, +.redoc-wrap .api-content a:visited { + color: var(--color-green-blue); +} + +.redoc-wrap { + font-size: 1rem; +} + +.api-content { + font-size: 1.125rem; +} + +.menu-content { + font-size: 1rem; + margin-top: 1em; +} + +html[data-theme="dark"] .api-content { + color: white; +} + +html[data-theme="dark"] .api-content table td > span, +html[data-theme="dark"] .api-content table td p { + color: var(--color-orange); +} + +table td { + vertical-align: top; +} + +html[data-theme="dark"] table td { + filter: brightness(1.25); + color: white; +} + +html[data-theme="dark"] .api-content table td span { + color: white; +} + +html[data-theme="dark"] .hComDo div { + background: transparent; +} + +html[data-theme="dark"] .api-content table td p { + color: brown; +} + +html[data-theme="dark"] .redoc-wrap .menu-content * :not(.operation-type), +html[data-theme="dark"] .redoc-wrap .menu-content, +html[data-theme="dark"] .redoc-wrap .menu-content .search-input { + color: white !important; + background-color: transparent !important; +} + +html[data-theme="dark"] .redoc-wrap svg { + filter: brightness(4.5); +} + +html[data-theme="dark"] .redoc-markdown pre { + filter: brightness(1.5); +} + +html[data-theme="dark"] input.sc-avest { + filter: brightness(2.5); } .dropdown--version--hide .dropdown__menu { @@ -53,33 +199,33 @@ background: var(--color-off-white); } -/* For /dbt-cloud/api redoc page */ -div.collapser { - color: white; +html[data-theme="dark"] .container.home { + background: var(--ifm-background-color) !important; } -.admonition h5 { - text-transform: none; - font-size: 1rem; -} - -.navbar__link--active { - border-bottom: 1px solid #fff; - padding: 0px 2px; - margin-left: 10px; - margin-right: 10px; +div.collapser { + color: var(--color-white); } h1, h2, h3 { - font-weight: var(--ifm-font-weight-semibold); + font-weight: var(--ifm-font-weight-bold); } -.navbar__search-input { - color: #505d73; - 
font-weight: var(--ifm-font-weight-semibold); - font-size: 1rem; +/* V2.0.0-beta.6 style updates */ +.padding-top--md { + padding-top: 2rem !important; +} + +code { + border: none; + background-color: var(--ifm-color-emphasis-200); + color: var(--ifm-color-emphasis-900); +} + +html[data-theme="dark"] a code { + color: var(--color-white); } .main-wrapper .home .col > p { @@ -90,6 +236,7 @@ h3 { h1.docTitle_node_modules-\@docusaurus-theme-classic-src-theme-DocItem- { font-size: 2rem; } + /* Adding FAQ specific links so we can left-justify the FAQ page titles */ a.hash-link-faq:link { margin-left: -20px; @@ -98,84 +245,321 @@ a.hash-link-faq:link { a.hash-link-faq:visited, a.hash-link-faq:hover { - color: black; + color: var(--docsearch-text-color); } a.hash-link-faq:active { - color: black; + color: var(--docsearch-text-color); } a.hash-link:link, a.hash-link:visited, a.hash-link:hover { - color: black; + color: var(--docsearch-text-color); } -footer { - font-weight: var(--ifm-font-weight-bold); +/* Deletes the Hash after TOC links that is generated by Docusaurus here for some reason */ +ul.table-of-contents .hash-link::before { + content: ''; + display: none; } -footer .footer__copyright a { - color: var(--ifm-footer-color); - text-decoration: none; + +pre { + font-size: 14px !important; +} + +a.code-link { + color: var(--color-orange); + cursor: pointer; } -footer .footer__copyright a:hover { + +a.code-link:hover { text-decoration: underline; } -/* Fix for nav links that break (like dbt Cloud) */ +/* navbar */ +[data-theme="dark"] nav.navbar, +[data-theme="dark"] div.navbar-sidebar__brand { + background-color: var(--ifm-navbar-background-color); +} + +.navbar__logo { + margin-left: 1.25rem; +} +.navbar__search-input { + color: #505d73; + font-weight: var(--ifm-font-weight-semibold); + font-size: 1rem; +} + +.navbar__link { + font-size: 1rem; + margin: 0 var(--ifm-navbar-item-padding-horizontal) 0; + padding: 0; + border-bottom: 1px solid transparent; +} + +/* Fix for nav links that break (like dbt Cloud) */ .navbar__item.navbar__link { white-space: nowrap; + align-items: center; } -pre { - font-size: 14px !important; +.navbar__link--active { + border-bottom: 1px solid var(--color-orange); + color: var(--color-orange); } -a.code-link { - color: var(--color-orange); - cursor: pointer; +.navbar__link.nav-versioning { + display: flex; + align-items: baseline; + justify-content: center; } -a.code-link:hover { - text-decoration: underline; +.navbar-sidebar .menu__link.nav-create-account { + color: var(--color-white); + border: none; } -nav.navbar, -div.navbar-sidebar__brand { - background-color: var(--color-primary-blue); +.navbar-sidebar .menu__link.nav-create-account:hover { + color: var(--color-white); + background: var(--color-light-teal); } -nav.navbar a.navbar__link { - color: var(--color-nav-text); - font-weight: var(--ifm-font-weight-bold); - transition: 100ms all ease-in-out; +.navbar__items .navbar__link.nav-create-account { + color: var(--color-white); + height: 30px; + align-self: auto; + padding: 0 12px; + margin-top: 0; + margin-left: 0; + border: none; + order: 3; } -nav.navbar a.navbar__link:hover { - color: var(--color-light-teal); +.navbar__items .navbar__link.nav-create-account:hover { + background: var(--color-light-teal); } -nav.navbar .navbar__toggle, -nav.navbar a.navbar__link--active { - color: white !important; - font-weight: bold !important; +/* Reorder search and darkmode toggle */ +.navbar__items.navbar__items--right div[class^="searchBox"] { + order: 2; } 
-li.menu__list-item a.navbar__link--active { - color: black !important; - font-weight: bold !important; +.navbar__items.navbar__items--right div[class^="toggle"] { + order: 4; } -nav.navbar div.navbar__inner { - margin-left: 10px !important; +/* moon/sun icons for dark/light mode */ +html[data-theme="light"] svg[class^="lightToggleIcon_"] { + display: none; +} + +html[data-theme="light"] svg[class^="darkToggleIcon_"] { + display: block; +} + +html[data-theme="dark"] svg[class^="lightToggleIcon_"] { + display: block; +} + +html[data-theme="dark"] svg[class^="darkToggleIcon_"] { + display: none; +} + +/* Search Button Styles */ +.search { + width: 100%; + padding: 10px 0px; + font-size: 20px; + border: 0px; + outline-width: 0px; + transition: all 0.3s ease; +} + +/* search popup */ +.DocSearch-Container { + background-color: rgba(0, 0, 0, 0.85) !important; +} + +.DocSearch-Form { + border: 1px solid var(--docsearch-muted-color); +} + +.DocSearch-Footer { + background: var(--docsearch-footer-background); + padding: 0 var(--docsearch-spacing); + border-radius: 0 0 5px 5px !important; +} + +.navbar-sidebar__close { + margin-right: 0.75em; +} + +/* navbar buttons */ +a.navbar__item.navbar__link.btn { + border: none; + color: var(--color-white); + background-color: var(--ifm-tabs-color-active); + align-self: center; + padding: 0.5em 1em; + border-radius: var(--ifm-button-border-radius); } -.dropdown__link { +a.navbar__item.navbar__link.btn:hover { + filter: brightness(var(--filter-brightness-high)); + transition: all 0.3s ease; +} + +/* sidebar menu */ +.menu__link { font-size: 1rem; + justify-content: space-between; + flex-direction: row; + padding: 1rem; } -.dropdown__link:hover { - cursor: pointer; + +.menu__list-item-collapsible .menu__link { + color: inherit; +} + +.menu__list-item-collapsible:hover { + background: var(--ifm-menu-color-background-hover) !important; +} + +.menu__caret, .menu__link { + display: flex; + align-items: center; +} + +.menu__caret { + padding: 0 0; + margin-left: 0; +} + +.menu__caret:hover { + background: transparent; +} + +/* Prevent layout shift when sidebar gets vertical overflow */ +@media(min-width: 996px) { + .theme-doc-sidebar-menu.menu__list { + max-width: 260px; + } +} + +/* level2 */ +li.theme-doc-sidebar-item-category.theme-doc-sidebar-item-category-level-2 .menu__list-item-collapsible { + font-weight: 600; + margin-top: .5rem; + margin-bottom: .5rem; +} + +.menu__list .menu__list { + font-weight: var(--ifm-font-weight-narrow); + padding-left: 1rem; +} + +.menu__list .menu__list .menu__link { + padding-top: 0.5rem; + padding-bottom: 0.5rem; +} + +.menu__list-item-collapsible a { + padding-top: .75rem; + padding-bottom: .75rem; +} + +.menu__list .menu__list a.menu__link.menu__link--sublist { + font-weight: var(--ifm-font-weight-semibold); + padding: 1rem; +} + +.menu__list-item-collapsible .menu__link--active { + color: inherit; + background-color: inherit; +} + +i.theme-doc-sidebar-item-category.theme-doc-sidebar-item-category-level-2.menu__list-item { + margin-left: 1rem; +} + +/* color for subtext only */ +.theme-doc-sidebar-item-link + :not(.menu__list-item-collapsible .menu__link:first-of-type) { + color: var(--color-nav-sub-level-text); +} + +[data-theme="dark"] + .theme-doc-sidebar-item-link + :not(.menu__list-item-collapsible .menu__link:first-of-type) { + color: var(--color-white); +} + +.theme-doc-sidebar-item-link .menu__link--active { + color: var(--ifm-link-color); +} + +/* set < icon to right side */ +.menu__list-item-collapsible { + 
flex-direction: row; + background-color: transparent; +} + +.menu__list-item-collapsible button { + padding-right: 1rem; +} + +.menu__list-item-collapsible:hover { + background-color: transparent; +} + +.menu__list-item--collapsed .menu__link--sublist::after, +.menu__list-item--collapsed .menu__caret::before { + transform: rotateZ(0deg); +} + +/* Mobile ToC caret */ +.docs-doc-page .theme-doc-toc-mobile .clean-btn:after { + transform: rotate(90deg); +} +.docs-doc-page .theme-doc-toc-mobile[class*="Expanded"] .clean-btn:after { + transform: rotate(180deg); +} + +/* < icon */ +.menu__link--sublist::after, +.menu__caret::before { + transform: rotate(90deg); + margin-left: 1em; + background: var(--ifm-breadcrumb-separator) center; + background-repeat: no-repeat; + height: 9px; + min-width: 6px; + width: calc( + var(--ifm-breadcrumb-separator-size) * var(--ifm-breadcrumb-size-multiplier) * + var(--ifm-breadcrumb-separator-size-multiplier) + ); +} + +/* Table of Contents */ +.table-of-contents__link:hover, +.table-of-contents__link:hover code, +.table-of-contents__link--active, +.table-of-contents__link--active code, +.tableOfContents_src-theme-TOC-styles-module a.theme-edit-this-page:hover { + color: var(--color-orange); +} + +.tableOfContents_src-theme-TOC-styles-module a.theme-edit-this-page { + color: var(--ifm-toc-link-color); +} + +.table-of-contents { + font-size: 0.9rem; + padding-left: 1.25rem; + padding-right: 1.25rem; } .featherlight:before { @@ -195,13 +579,38 @@ nav.navbar div.navbar__inner { font-size: 20px !important; } -/* Shamelessly copy/pasted from our style guide */ +/* admonition */ +.admonition { + border-left-width: 0.625rem; + border-radius: 0.625rem; +} +.admonition h5 { + text-transform: none; + font-size: 1.5rem; +} + +.admonition-icon svg { + fill: var(--ifm-color-gray-900); +} + +.admonition:not(.admonition-note) h5, +.admonition:not(.admonition-note) .admonition-content, +.admonition:not(.admonition-note) .admonition-content a { + color: var(--ifm-color-gray-900); +} + +.markdown .admonition-content p { + font-size: 1rem; +} + +/* Shamelessly copy/pasted from our style guide */ .alert { padding: 1.5rem 2rem; - margin-bottom: 1.8rem; + margin-top: 2.125em; + margin-bottom: 1.8em; /* border: 1px solid transparent; */ - border-radius: 4px; + border-radius: 5px; } .alert h4 { margin-top: 0; @@ -221,18 +630,30 @@ nav.navbar div.navbar__inner { .alert--info { --ifm-alert-background-color: var(--ifm-color-info-light); --ifm-alert-border-color: var(--ifm-color-info); + color: var(--ifm-color-gray-900); } .alert--success { --ifm-alert-background-color: var(--ifm-color-success-light); --ifm-alert-border-color: var(--ifm-color-success); + color: var(--ifm-color-gray-900); } .alert--danger { --ifm-alert-background-color: var(--ifm-color-danger-light); --ifm-alert-border-color: var(--ifm-color-danger); + color: var(--ifm-color-gray-900); } .alert--warning { --ifm-alert-background-color: var(--ifm-color-warning-light); --ifm-alert-border-color: var(--ifm-color-warning); + color: var(--ifm-color-gray-900); +} + +/* for dark mode */ +.alert--info a, +.alert--success a, +.alert--danger a, +.alert--warning a { + color: var(--ifm-color-gray-900); } .linkout { @@ -246,6 +667,7 @@ nav.navbar div.navbar__inner { vertical-align: middle; } +/* banners */ .banner-animation { /*background-color: #013544;*/ background-color: var(--color-off-white); @@ -282,23 +704,11 @@ nav.navbar div.navbar__inner { animation: march 8s linear infinite forwards; } -.search { - width: 100%; - padding: 10px 
0px; - font-size: 20px; - border: 0px; - outline-width: 0px; +div .toggle_src-components-faqs-styles-module { + background-size: 1.25rem 1.25rem; } -.menu__link { - font-size: 1rem; - padding: 5px 0px 5px 10px !important; -} - -.home .row .col--4 { - margin-bottom: 20px; - min-height: 251px; -} +/* card styles */ .card.large { position: relative; height: 100%; @@ -311,6 +721,7 @@ nav.navbar div.navbar__inner { .card.large .card__footer { z-index: 9; } + .card.large:before { position: absolute; z-index: 0; @@ -321,6 +732,7 @@ nav.navbar div.navbar__inner { background: var(--color-primary-blue); content: ""; } + .card.large:after { position: absolute; z-index: 0; @@ -338,10 +750,11 @@ nav.navbar div.navbar__inner { .home .card { background: var(--ifm-footer-background-color); - color: #fff; + color: var(--color-white); } .card.large .button { + color: var(--color-white); font-size: 1.25rem; font-weight: var(--ifm-font-weight-semibold); margin-top: 0.25rem; @@ -354,15 +767,15 @@ nav.navbar div.navbar__inner { } .card.large .button:hover { - background: #fff; - border-color: #fff; + background: var(--color-white); + border-color: var(--color-white); color: var(--color-green-blue); } .card.large.dark .button:hover { background: var(--color-light-teal); border-color: var(--color-light-teal); - color: #fff; + color: var(--color-white); } .card.large.light .button, @@ -374,7 +787,7 @@ nav.navbar div.navbar__inner { .card.large.light .button:hover { background: #de5d43; border-color: #de5d43; - color: #fff; + color: var(--color-white); } .card.large.light-blue .button { @@ -394,87 +807,83 @@ nav.navbar div.navbar__inner { line-height: 26px; } -/* Table of Contents */ -.table-of-contents__link:hover, -.table-of-contents__link:hover code, -.table-of-contents__link--active, -.table-of-contents__link--active code { - color: var(--ifm-link-hover-color); -} -.table-of-contents { - font-size: 0.9rem; +/* Pagination */ +.pagination-nav { + gap: 1em; } -/* Pagination */ .pagination-nav__link { + padding: 1rem 0; transition: 100ms all ease-in-out; + border: none; } -/* Font Sizing Adjustments */ -.markdown > p { - font-size: 1.05rem; -} -.pagination-nav__link { - font-size: 1.05rem; -} -.title_src-components-file- { - font-size: 0.813rem !important; +.pagination-nav__item--next .pagination-nav__label::after { + content: var(--pagination-icon-next); + font-weight: var(--ifm-font-weight-narrow); + margin-left: 0.5em; } -/* Docs */ -.docImage_src-components-lightbox-, -.docImage_src-components-lightbox-styles-module, -.docImage_2V3X, .docImage_917O { - max-width: 100%; +.pagination-nav__item:first-child .pagination-nav__label::before { + content: var(--pagination-icon-prev); + font-weight: var(--ifm-font-weight-narrow); + margin-right: 0.5em; } -.generatedIndexPage_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocCategoryGeneratedIndexPage-styles-module .cardContainer_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocCard-styles-module, .generatedIndexPage_aEAk .card { - color: #fff; - background: var(--ifm-footer-background-color); -} -.generatedIndexPage_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocCategoryGeneratedIndexPage-styles-module .cardContainer_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocCard-styles-module:hover, .generatedIndexPage_aEAk .card:hover { - color: #fff; - background: var(--color-primary-blue); +.pagination-nav__sublabel { + font-weight: var(--ifm-font-weight-narrow); + margin-bottom: 1em; } -@media (min-width: 997px) and (max-width: 1150px) { - 
nav.navbar a.navbar__link { - font-size: 0.9rem; - } - .navbar__search .navbar__search-input { - max-width: 160px; - } +/* Font Sizing Adjustments */ +.markdown, +.markdown > p, +.markdown li, +blockquote, +th, +td, +.faqs, +.tabs-container { + font-size: 1.125rem; + line-height: 1.5; } - -@media screen and (max-width: 700px) { - .card.large { - height: auto; - text-align: center; - } - .container.container--fluid.home .row { - margin: calc(2vh) auto !important; - } +.pagination-nav__link { + font-size: 1rem; } - -/* V2.0.0-beta.6 style updates */ -.padding-top--md { - padding-top: 2rem !important; +.title_src-components-file- { + font-size: 1.125rem !important; } - -code { - border: none; - background-color: var(--ifm-color-emphasis-200); - color: var(--ifm-color-emphasis-900); +.markdown h1:first-child { + font-size: clamp( + var(--ifm-h1-font-size) * 0.9375, + 1vw, + var(--ifm-h1-font-size) + ); + margin-top: 0.5rem; +} +.link_src-components-faqs-styles-module { + font-size: 1.125rem; } -.DocSearch-Button-Container .DocSearch-Button-Placeholder { - padding: 0 2.25rem 0 0; +/* Docs */ +.docImage_src-components-lightbox-, +.docImage_src-components-lightbox-styles-module, +.docImage_2V3X, +.docImage_917O { + max-width: 100%; } -@media (min-width: 1440px) { - .container--fluid { - max-width: inherit; - } +.generatedIndexPage_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocCategoryGeneratedIndexPage-styles-module + .cardContainer_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocCard-styles-module, +.generatedIndexPage_aEAk .card { + color: var(--color-white); + background: var(--ifm-footer-background-color); +} +.generatedIndexPage_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocCategoryGeneratedIndexPage-styles-module + .cardContainer_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocCard-styles-module:hover, +.generatedIndexPage_aEAk .card:hover { + color: var(--color-white); + background: var(--color-primary-blue); } /* Custom Blog Styles */ @@ -482,17 +891,36 @@ code { display: block; margin-bottom: 0; background: var(--color-off-white); - padding: .5rem 0; + padding: 0.5rem 0; +} + +html[data-theme="dark"] .blog-breadcrumbs { + background-color: rgba(20, 20, 10, 0.75); } + .blog-breadcrumbs a { margin-right: 1.5rem; position: relative; } + .blog-breadcrumbs a[href="#"] { cursor: initial; - color: var(--color-primary-blue); pointer-events: none; } + +/* darkmode breadcrumbs */ +html[data-theme="dark"] .blog-breadcrumbs a[href="#"] { + filter: brightness(var(--filter-brightness-low)); +} + +html[data-theme="dark"] .blog-breadcrumbs a:not(:last-of-type):after { + color: var(--ifm-link-color); +} + +html[data-theme="dark"] .breadcrumbs__item--active .breadcrumbs__link { + filter: brightness(var(--filter-brightness-high)); +} + .blog-breadcrumbs a:not(:last-of-type):after { content: ">"; color: var(--color-primary-blue); @@ -502,7 +930,7 @@ code { bottom: 0; margin: auto 0; width: auto; - font-size: .9rem; + font-size: 0.9rem; pointer-events: none; } @@ -519,17 +947,302 @@ code { flex: 1 0 300px; max-width: 300px; } + .blog-main-row .blog-aside li a { font-size: 1rem; line-height: 1.25; } + +/* footer styles */ +.footer { + font-weight: var(--ifm-font-weight-narrow); + font-size: 0.875rem; + padding-top: var(--ifm-footer-padding-vertical); + padding-bottom: var(--ifm-footer-padding-vertical); +} + +.footer button { + font-size: 0.875rem !important; +} + +.footer a { + color: var(--ifm-footer-color); + text-decoration: none; +} + +.footer a:hover { + 
text-decoration: underline; + color: var(--ifm-footer-color); +} + +.footer__links { + margin-bottom: 0; +} + +.footer__link-item { + color: var(--ifm-footer-color); + display: flex; + justify-content: space-between; +} + +.footer__items { + list-style: none; + margin-bottom: 1em; + padding-left: 0; + gap: 1.5em; + display: flex; +} + +/* social media icons in footer */ +.footer__items--right { + display: flex; + gap: 1.5em; +} + +/* fontawesome icons in footer */ +.footer .fa-brands { + color: var(--color-footer-accent); + font-size: 1.6rem; + transition: all 0.3s ease-in-out; +} + +.footer .fa-brands:hover { + filter: opacity(0.75); +} + +/* copyright */ +.footer__bottom { + text-align: left; + color: var(--color-footer-accent); + font-size: 0.875rem; +} + +.footer__copyright { + margin-top: 0.5em; +} + +@media (min-width: 997px) and (max-width: 1045px) { + .navbar__link.nav-versioning { + transition: all 0.3s ease-in-out; + } +} + +@media (min-width: 997px) and (max-width: 2560px) { + .container > .row .col { + padding-left: 2.5rem; + } + /* for non row divs like on /category/adapter-development */ + .container > div:not(.row) { + padding-left: 2.5rem; + } +} + +/* Begin @Media Queries */ +@media (min-width: 997px) and (max-width: 1150px) { + nav.navbar a.navbar__link { + font-size: 0.875rem; + } + + .navbar__search .navbar__search-input { + max-width: 160px; + } +} + +@media (max-width: 910px) { + .footer__items { + flex-direction: column; + align-items: flex-start; + } +} + +@media (max-width: 996px) { + .navbar-sidebar__brand + .navbarSidebarToggle_node_modules-\@docusaurus-theme-classic-lib-next-theme-Navbar-styles-module { + margin-right: 0; + margin-left: 0.25em; + } + + .navbar-sidebar__brand > div:first-of-type { + order: 1; + margin-right: 0; + } + + .footer__link-item { + flex-direction: column; + } + + .footer__copyright { + margin-top: 1em; + } +} + +@media screen and (max-width: 750px) { + .card.large { + height: auto; + text-align: center; + } +} + +@media screen and (max-width: 1199px) { + button.DocSearch-Button { + background: var(--ifm-color-gray-100); + } + + [data-theme="dark"] button.DocSearch-Button { + background-color: #ffffff20; + } + + .DocSearch-Button .DocSearch-Search-Icon { + color: var(--docsearch-muted-color); + } + + /* hide search text and keys until 1201px */ + button .DocSearch-Button-Placeholder, + button .DocSearch-Button-Keys { + display: none; + } +} + +@media screen and (min-width: 1200px) { + /* search bar styles */ + button.DocSearch-Button { + border: 1px solid #95a0b1; + border-radius: 5px; + height: 30px; + } + + button.DocSearch-Button:active, + button.DocSearch-Button:focus, + button.DocSearch-Button:hover { + color: var(--docsearch-muted-color); + background: white; + box-shadow: var(--docsearch-searchbox-shadow); + border: 1px solid var(--ifm-link-color); + } + + button .DocSearch-Button-Placeholder { + font-size: 0.875rem; + font-weight: 600; + padding: 0 12px 0 8px; + } + + /* magnifying glass icon */ + button.DocSearch-Button .DocSearch-Search-Icon { + color: var(--docsearch-muted-color); + stroke-width: 2px; + width: 1em; + height: 1em; + } + + /* keys wrapper */ + button .DocSearch-Button-Keys { + border: 1px solid var(--docsearch-muted-color-for-keys); + border-radius: 5px; + padding: 0.24em 0.5em; + gap: 0.1em; + } + + /* single key */ + button .DocSearch-Button-Key { + font-size: 1.25rem; + margin-right: 0; + padding: 4px 4px 2px; + } + + /* Add dark background on hover for searchbox */ + [data-theme="dark"] 
button.DocSearch-Button:active, + [data-theme="dark"] button.DocSearch-Button:focus, + [data-theme="dark"] button.DocSearch-Button:hover { + background: #ffffff10; + } +} + +@media (min-width: 1440px) { + .container--fluid { + max-width: inherit; + } + + html.docs-version-current .navbar__brand { + background-color: var(--ifm-background-color); + border-right: 1px solid var(--ifm-toc-border-color); + } + + button.DocSearch-Button { + min-width: 175px; + } +} + @media (min-width: 997px) { + html.docs-version-current .navbar__brand { + padding: 0 var(--ifm-navbar-padding-horizontal) 0 0; + min-height: var(--ifm-navbar-height); + max-height: var(--ifm-navbar-height); + width: 300px; + min-width: 5.5em; + } + + html.docs-version-current .navbar { + width: 100%; + margin-left: 0; + } + + .navbar { + padding: 0 var(--ifm-navbar-padding-horizontal) 0 0; + } + + html.docs-version-current + aside + html.docs-version-current + .sidebar_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocSidebar-styles-module { + position: relative; + } + + aside .menu { + padding-left: .75rem; + padding-right: 1rem; + } + + .navbar__item { + padding: var(--ifm-navbar-item-padding-vertical) 0; + } + + .navbar__item.navbar__link { + display: flex; + align-self: stretch; + } + + .navbar__items--right > :last-child { + padding: var(--ifm-navbar-item-padding-vertical) 1em + var(--ifm-navbar-item-padding-vertical) 0.5em; + } + .blog-main-row > main { width: 100%; flex-grow: 1; max-width: calc(100% - var(--doc-sidebar-width)); } + + .table-of-contents::before { + content: "CONTENTS"; + margin-left: 0.5rem; + color: var(--color-nav-sub-level-text); + font-size: 0.875rem; + font-weight: var(--ifm-font-weight-semibold); + } + + [data-theme="dark"] .table-of-contents::before { + color: var(--color-white); + } + + html[data-theme="dark"] .menu-content { + border-right: 1px solid #dadde1; + } + + .docSidebarContainer_src-theme-DocPage-styles-module { + margin-top: calc(-0.8 * var(--ifm-navbar-height)) !important; + } } + .blog-main-row .blog-right-sidebar { max-width: 400px !important; margin-top: 2rem; @@ -539,7 +1252,8 @@ code { margin-bottom: 3rem; } -.blog-post-page .docItemCol_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocItem-styles-module { +.blog-post-page + .docItemCol_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocItem-styles-module { max-width: 70% !important; } @@ -554,7 +1268,8 @@ code { .blog-main-row .blog-right-sidebar { max-width: initial !important; } - .blog-post-page .docItemCol_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocItem-styles-module { + .blog-post-page + .docItemCol_node_modules-\@docusaurus-theme-classic-lib-next-theme-DocItem-styles-module { max-width: 100% !important; } } @@ -622,7 +1337,7 @@ code { } .blog-hero-card { background: var(--ifm-footer-background-color); - color: white; + color: var(--color-white); } .blog-hero-card .button { text-decoration: none; @@ -633,7 +1348,8 @@ code { text-align: left; } @media (max-width: 699px) { - .card.large.blog-hero-card:before, .card.large.blog-hero-card:after { + .card.large.blog-hero-card:before, + .card.large.blog-hero-card:after { display: none; } .blog-hero-card { @@ -653,7 +1369,8 @@ code { .author-header { align-items: center; } -.author-header-left, .author-header-right { +.author-header-left, +.author-header-right { padding: 20px; } .author-header img { @@ -722,10 +1439,8 @@ code { /* Tag Page Styles */ .tag-header { margin: 0 0 var(--ifm-paragraph-margin-bottom); - } - /* CTA Styles */ .docs-cta { 
padding: 20px 10px; @@ -734,20 +1449,20 @@ code { /*position: sticky;*/ text-align: center; background: var(--color-off-white); - position:relative; - bottom:0; + position: relative; + bottom: 0; max-width: 400px; } .docs-cta h4 { margin-bottom: 10px; } .docs-cta p { - font-size: .9rem; + font-size: 0.9rem; line-height: 21px; } .docs-cta .docs-cta-btn { background: var(--ifm-color-primary); - color: #fff; + color: var(--color-white); border-radius: 5px; border: none; text-decoration: none; @@ -761,11 +1476,10 @@ code { @media (max-width: 996px) { .docs-cta { - display:none; + display: none; } } - .mobile-toc-section { display: none; } @@ -777,13 +1491,17 @@ code { .mobile-toc-section { display: block; } - .mobile-toc-section .tableOfContents_node_modules-\@docusaurus-theme-classic-lib-next-theme-TOC-styles-module { + .mobile-toc-section + .tableOfContents_node_modules-\@docusaurus-theme-classic-lib-next-theme-TOC-styles-module { display: none; } - .mobile-toc-section .tableOfContents_node_modules-\@docusaurus-theme-classic-lib-next-theme-TOC-styles-module.tocActive { + .mobile-toc-section + .tableOfContents_node_modules-\@docusaurus-theme-classic-lib-next-theme-TOC-styles-module.tocActive { display: block; } - .mobile-toc-section .tableOfContents_node_modules-\@docusaurus-theme-classic-lib-next-theme-TOC-styles-module .table-of-contents__left-border { + .mobile-toc-section + .tableOfContents_node_modules-\@docusaurus-theme-classic-lib-next-theme-TOC-styles-module + .table-of-contents__left-border { border-left: none; } #mobile-toc-dropdown button.tocActive:after { @@ -803,7 +1521,7 @@ code { width: 100%; } #mobile-toc-dropdown > button::after { - content: ''; + content: ""; background: var(--ifm-menu-link-sublist-icon) 50% 50% / 2rem 2rem no-repeat; filter: var(--ifm-menu-link-sublist-icon-filter); height: 1.25rem; @@ -825,3 +1543,187 @@ code { .docs-wrapper.docs-doc-page main div.row > article.col--6 { margin-bottom: 1.5rem; } + +/* OneTrust Cookies Settings Btn in Footer */ +footer #ot-sdk-btn, +footer #ot-sdk-btn.optanon-show-settings { + background: none; + border: none; + color: var(--ifm-footer-color); + text-decoration: none; + font-weight: var(--ifm-font-weight-bold); + font-family: var(--ifm-font-family-base); + padding: 0; + line-height: inherit; + font-size: 1.125rem; + font-weight: 400; + cursor: pointer; +} +footer #ot-sdk-btn:hover, +footer #ot-sdk-btn.optanon-show-settings:hover { + background: none; + border: none; + color: var(--ifm-footer-color); + text-decoration: underline; +} + +/* Discourse Forum & Events Page */ +.events-page section, +section.discourse-forum-page { + padding: 4rem 0; +} + +/* New Homepage Styles */ + +.home section { + margin: 6.25rem auto 6.25rem; + max-width: var(--ifm-container-width-xl); + padding: 0 var(--ifm-spacing-horizontal); +} + +.home section.from-the-blog { + margin-bottom: 0; +} +.home section.from-the-community { + margin-top: 0; + +} + +.home .resource-section h2 { + margin: 0; +} + +@media (max-width: 995px) { + .home .resource-section h2 { + text-align: center; + margin: 2rem inherit; + } +} + +section > h2:not(.resource-section) { + margin-bottom: 25px; +} + +.home .resource-section { + display: grid; + grid-template-columns: 1fr; + grid-gap: 16px; + grid-template-areas: + "featH2" + "featResource" + "popH2" + "popResources"; +} + +@media (min-width: 996px) { + .home .resource-section { + display: grid; + grid-template-columns: 2fr 1fr; + row-gap: 25px; + column-gap: calc(var(--ifm-spacing-horizontal) * 4); + grid-template-areas: 
+ "popH2 featH2" + "popResources featResource"; + } +} + +.home .popular-header { + grid-area: popH2; + margin-top: 2.5rem; +} + +@media (min-width: 996px) { + .home .popular-header { + margin-top: 0; + } +} + +.home .popular-resources { + grid-area: popResources; +} + +.home .popular-resources .grid { + display: grid; + grid-template-columns: 1fr; + grid-gap: var(--ifm-spacing-horizontal); +} + +@media (min-width: 996px) { + .home .popular-resources .grid { + grid-template-columns: 1fr 1fr; + grid-template-rows: 1fr 1fr; + grid-gap: calc(var(--ifm-spacing-horizontal) * 2); + } +} + +.home .featured-header { + grid-area: featH2; +} + +.home .featured-resource { + grid-area: featResource; + height: 100%; +} + +.home .swiper .swiper-slide { + height: unset; +} + +.home .swiper.swiper-initialized { + padding-right: 3rem; + padding-left: 3rem; + padding-top: 0rem; + padding-bottom: 6.25rem; + margin-bottom: 0; +} + +.home .swiper .swiper-button-next, .home .swiper .swiper-button-prev { + color: #047377; + font-weight: 800; + position: absolute; + top: 40%; +} + +[data-theme='dark'] .home .swiper-button-next, [data-theme='dark'] .home .swiper-button-prev { + color: #fff; + font-weight: 800; +} + +/* Community Home styles */ +.community-home section { + margin: calc(5vh) auto calc(2vh); + max-width: var(--ifm-container-width-xl); + padding: 0 var(--ifm-spacing-horizontal); +} + + +/* utilities */ +.grid--3-col { + display: grid; + grid-template-columns: repeat(1, 1fr); + grid-gap: var(--ifm-spacing-horizontal) +} + +@media(min-width: 996px) { + .grid--3-col { + grid-template-columns: repeat(3, 1fr); + grid-gap: calc(var(--ifm-spacing-horizontal) * 2); + } +} + +.grid--2-col { + display: grid; + grid-template-columns: repeat(1, 1fr); + grid-gap: var(--ifm-spacing-horizontal) +} + +@media(min-width: 996px) { + .grid--2-col { + grid-template-columns: repeat(2, 1fr); + grid-gap: calc(var(--ifm-spacing-horizontal) * 2); + } +} + +.justify-content-center { + justify-content: center !important; +} diff --git a/website/src/pages/community/events.js b/website/src/pages/community/events.js new file mode 100644 index 00000000000..8002daf0301 --- /dev/null +++ b/website/src/pages/community/events.js @@ -0,0 +1,27 @@ + +import React from 'react'; +import Layout from '@theme/Layout'; +import Head from '@docusaurus/Head'; +import EventsFeed from '@site/src/components/events'; + + +function Events() { + return ( + + + dbt Community Events + + +
+        Upcoming dbt Community Events
+        Join us for upcoming meetups, conferences, or office hours with the dbt Labs team. Events are online unless explicitly listed as in-person.
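/* The page skeleton, reconstructed as an assumption (only Layout, Head, and
   EventsFeed are imported above; the heading markup was plain JSX):

     <Layout>
       <Head><title>dbt Community Events</title></Head>
       <section>
         <h1>Upcoming dbt Community Events</h1>
         <p>Join us for upcoming meetups, conferences, or office hours...</p>
         <EventsFeed />
       </section>
     </Layout>
*/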
    + ); +} + +export default Events; diff --git a/website/src/pages/community/forum.js b/website/src/pages/community/forum.js new file mode 100644 index 00000000000..346d96e9adf --- /dev/null +++ b/website/src/pages/community/forum.js @@ -0,0 +1,26 @@ +import React from 'react'; +import Layout from '@theme/Layout'; +import Head from '@docusaurus/Head'; +import { DiscourseFeed } from '@site/src/components/discourse'; + +function Events() { + return ( + + + dbt Community Forum + + +
+        dbt Community Forum
+        The dbt Community Forum is the preferred platform for support questions as well as a space for long-lived discussions about dbt, analytics engineering, and the analytics profession. It's a place for us to build up a long-lasting knowledge base around the common challenges, opportunities, and patterns we work with every day.
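/* Same skeleton as events.js, swapping in the named DiscourseFeed export.
   Note that this component is also declared as `function Events()`; each page
   is its own module's default export so nothing collides, but renaming it
   `Forum` would make the copy-paste origin less confusing. */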
    + ); +} + +export default Events; diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 0869329e9fb..5c9de9e85b2 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -1,283 +1,161 @@ import React from 'react'; -import Link from '@docusaurus/Link'; -import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; -import useBaseUrl from '@docusaurus/useBaseUrl'; import Layout from '@theme/Layout'; -import classnames from 'classnames'; import Head from '@docusaurus/Head'; +import Card from '@site/src/components/card'; +import BlogPostCard from '@site/src/components/blogPostCard'; +import Hero from '@site/src/components/hero'; +import PostCarousel from '@site/src/components/postCarousel'; +import allBlogData from './../../.docusaurus/docusaurus-plugin-content-blog/default/blog-archive-80c.json' + + const bannerAnimation = require('@site/static/img/banner-white.svg'); function getBanner() { - return {__html: bannerAnimation}; + return { __html: bannerAnimation }; }; function Home() { - const context = useDocusaurusContext(); + + const recentBlogData = allBlogData.blogPosts.slice(0, 6).reduce((accumulator, currentValue) => { + let postMetaData = { + title: currentValue.metadata.title, + date: currentValue.metadata.formattedDate, + readingTime: Math.round(currentValue.metadata.readingTime), + description: currentValue.metadata.description, + link: currentValue.metadata.permalink, + } + accumulator.push(postMetaData) + return accumulator + }, []) + const featuredResource = { + title: "How we structure our dbt projects", + description: "Our hands-on learnings for how to structure your dbt project for success and gain insights into the principles of analytics engineering.", + link: "/guides/best-practices/how-we-structure/1-guide-overview", + image: "/img/structure-dbt-projects.png" + } return ( <> - -
-        Get started
-        If you’re new to dbt™, start here. These resources will get you off to a strong start:
-        What is dbt?
-        Some of the very first questions a new user has are covered in this introduction.
-        Learn More
-        Getting started guide
-        Read this guide to learn how to build, test, and deploy a new dbt project.
-        Learn more
-        dbt Learn
-        Learn dbt on your own time with our on demand course or sign up for an upcoming, live public course.
-        Learn Now
+        Popular resources
-        Build your project
-        If you’re getting comfortable with dbt, bookmark these resources! They’ll help you level up quickly.
-        Docs
-        The core concepts of dbt, from models, to sources, to tests.
-        Read Up
-        Reference
-        The technical reference for dbt configurations. You’ll need to know the basics of dbt before using this section.
-        Browse Reference
-        FAQs
-        Commonly asked questions about dbt.
-        Get Answers
+        Featured resource
+        The latest from the Developer Blog
+        From the dbt Community
-        If you’re a dbt Cloud™ user, these resources may also be helpful:
-        dbt Cloud guides
-        Guides to help you set up your dbt project in dbt Cloud
-        Cloud Overview
-        dbt Cloud API
-        Technical reference docs for using the dbt Cloud APIs.
-        API docs
-        Learn from the community
-        Every data team uses dbt to solve different analytics engineering problems. It can be useful to learn how other teams are using dbt with the following resources:
-        Discourse
-        Common use cases and helpful articles from the community have been published here
-        Get Advice
-        Slack
-        Where the dbt community hangs out, discusses issues, and troubleshoots problems together
-        Join us on Slack
-        Example projects
-        A list of some dbt projects in the wild
-        View Projects
-        Having trouble?
-        If you're having trouble, check out our guide on Getting Help for information on getting support and asking questions in the community.
+        Use dbt like a pro
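/* Homepage data flow, for orientation: `recentBlogData` reduces the six most
   recent entries of the generated blog archive JSON into the card shape that
   PostCarousel expects, while `featuredResource` is a hand-maintained object
   passed to the featured Card. Importing
   .docusaurus/docusaurus-plugin-content-blog/default/blog-archive-80c.json
   directly ties the page to a build artifact, so the hashed filename may need
   updating if the blog plugin configuration changes (an inference, not
   something this patch addresses). */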
    ); diff --git a/website/src/pages/styles.js b/website/src/pages/styles.js index 0d3d2025ccb..3f326104633 100644 --- a/website/src/pages/styles.js +++ b/website/src/pages/styles.js @@ -38,11 +38,11 @@ function Styles() {

    Linked Markdown Code Blocks

    {`
     \`\`\`
    -[view the license](license)
    +[view the intro](docs/introduction)
     \`\`\`
     `}
    -[view the license](license) +[view the intro](docs/introduction)

    Use a backslash to escape linking:

    @@ -105,9 +105,9 @@ description: "this is \[an escaped link](docs.getdbt.com)"

    FAQ

    -
    {``}
    - - +
    {``}
    + +
    @@ -135,34 +135,8 @@ password: hunter2
    -

    Markdown Link

-Links to pages can be specified using:
-  • Just the id¹ of the document, if the id is unique. Note: the id may be specified in the YAML front-matter of a document. If not, then it defaults to the filename.
-  • A relative id of the document. Note: this is required when two documents have the same id.
-  • Or, a path to the document (with .md file extension), relative to the website/docs/ directory. Note: this is required for pages where the id looks like a filename (e.g. profiles.yml)
    -Bad links will appear with red underlines when building locally, and will cause an error in a deploy preview. -
    -
    {`[link to unique id](available-adapters)
    -[disambiguated link to duplicate id](docs/about/overview)
    -[second disambiguated link to duplicate id](dbt-cli/install/overview)
    -[file paths work too](docs/about/overview.md)
    -[link to document where id looks like a filename](reference/profiles.yml.md)
    -[a bad link](bad-link)
    -`}
-  link to unique id
-  disambiguated link to duplicate id
-  second disambiguated link to duplicate id
-  file paths work too
-  link to document where id looks like a file
-  a bad link
+

    Markdown Links

    + Refer to the Links section of the Content Style Guide to read about how you can use links in the dbt product documentation.
    diff --git a/website/src/theme/AnnouncementBar/index.js b/website/src/theme/AnnouncementBar/index.js new file mode 100644 index 00000000000..f82645c9b1c --- /dev/null +++ b/website/src/theme/AnnouncementBar/index.js @@ -0,0 +1,76 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +import React from 'react'; +import clsx from 'clsx'; +import {useThemeConfig, useAnnouncementBar} from '@docusaurus/theme-common'; +import {translate} from '@docusaurus/Translate'; +import IconClose from '@theme/IconClose'; +import styles from './styles.module.css'; +export default function AnnouncementBar() { + const {isActive, close} = useAnnouncementBar(); + const {announcementBar, announcementBarActive, announcementBarLink} = useThemeConfig(); + + if (!isActive || !announcementBarActive) { + return null; + } + + const {content, backgroundColor, textColor, isCloseable} = announcementBar; + + return ( +
    + {isCloseable &&
    } + {announcementBarLink ? ( + + + + ) : ( + + )} + + {isCloseable ? ( + + ) : null} +
    + ); +} + +function AnnouncementBarContent({ content, styles }) { + return ( +
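/* This swizzled AnnouncementBar adds two custom themeConfig fields on top of
   the stock component: `announcementBarActive` gates rendering entirely, and
   `announcementBarLink` wraps the bar content in a link when present. Both
   would sit alongside the standard options in docusaurus.config.js (values
   assumed):

     themeConfig: {
       announcementBar: {
         id: 'example-banner',
         content: 'Something is happening!',
         backgroundColor: '#047377',
         textColor: '#fff',
         isCloseable: true,
       },
       announcementBarActive: true,
       announcementBarLink: 'https://www.getdbt.com/',
     }
*/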
    + ) +} diff --git a/website/src/theme/AnnouncementBar/styles.module.css b/website/src/theme/AnnouncementBar/styles.module.css new file mode 100644 index 00000000000..e76f981dfbb --- /dev/null +++ b/website/src/theme/AnnouncementBar/styles.module.css @@ -0,0 +1,67 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +:root { + --docusaurus-announcement-bar-height: auto; +} + +.announcementBar { + display: flex; + align-items: center; + height: auto; +} + +html[data-announcement-bar-initially-dismissed='true'] .announcementBar { + display: none; +} + +.announcementBarPlaceholder { + flex: 0 0 10px; +} + +.announcementBarClose { + flex: 0 0 30px; + align-self: stretch; + padding: 0; + line-height: 0; + color: var(--ifm-color-white); + opacity: 1; +} + +.announcementBarContent { + flex: 1 1 auto; + font-size: 1.125rem; + text-align: center; + padding: 15px 0; +} + +@media print { + .announcementBar { + display: none; + } +} + +.announcementBarContent a { + color: inherit; + text-decoration: underline; +} + +@media (min-width: 997px) { + .announcementBarPlaceholder, + .announcementBarClose { + flex-basis: 50px; + } +} + +.announcementBarLink { + color: var(--ifm-color-white); + width: 100%; +} +.announcementBarLink:hover { + color: var(--ifm-color-white); + text-decoration: none; +} diff --git a/website/src/theme/BlogPostItem/index.js b/website/src/theme/BlogPostItem/index.js index 9e93d67efe3..071319aae31 100644 --- a/website/src/theme/BlogPostItem/index.js +++ b/website/src/theme/BlogPostItem/index.js @@ -12,7 +12,7 @@ * - Add image above title for blog posts */ -import React from 'react'; +import React, { useEffect } from 'react'; import clsx from 'clsx'; import {MDXProvider} from '@mdx-js/react'; import Translate, {translate} from '@docusaurus/Translate'; @@ -101,6 +101,36 @@ function BlogPostItem(props) { ); }; + // dbt custom - send blog context to datalayer to send to snowplow + useEffect(() => { + let blogContext = { + event: 'blogContext', + blogAuthor: '', + blogCategory: '', + blogDate: formattedDate ? formattedDate : undefined + } + + if(authors && authors.length > 0) { + authors.map((author, i) => { + blogContext.blogAuthor += + `${author.name}${i !== authors.length - 1 ? ', ' : ''}` + }) + } + + if(tags && tags.length > 0) { + tags.map((tag, i) => { + blogContext.blogCategory += + `${tag.label}${i !== tags.length - 1 ? ', ' : ''}` + }) + } + + // Only send to datalayer if blog post page + if(isBlogPostPage) { + window.dataLayer = window.dataLayer || []; + dataLayer && dataLayer.push(blogContext) + } + }, []) + return ( <> {frontMatter.canonical_url && ( diff --git a/website/src/theme/BlogSidebar/index.js b/website/src/theme/BlogSidebar/index.js index 0e03f696673..c68112aa0a1 100644 --- a/website/src/theme/BlogSidebar/index.js +++ b/website/src/theme/BlogSidebar/index.js @@ -29,7 +29,7 @@ export default function BlogSidebar({sidebar, tagData}) { Categories
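{/* The filter added on the next line limits the sidebar's category list to
    tags flagged `is_featured` in the tag data; non-featured tags keep their
    tag pages but no longer appear here. */}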
      - {tagData && tagData.map((tag, i) => { + {tagData && tagData.filter(tag => tag.is_featured)?.map((tag, i) => { if(!tag) return null return ( diff --git a/website/src/theme/DocItem/index.js b/website/src/theme/DocItem/index.js index fc692d27631..5337c58b808 100644 --- a/website/src/theme/DocItem/index.js +++ b/website/src/theme/DocItem/index.js @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ -import React, {useState, useEffect, useContext} from 'react'; +import React, { useState, useEffect, useContext } from 'react'; import clsx from 'clsx'; import DocPaginator from '@theme/DocPaginator'; import DocVersionBanner from '@theme/DocVersionBanner'; @@ -15,16 +15,17 @@ import TOC from '@theme/TOC'; import TOCCollapsible from '@theme/TOCCollapsible'; import Heading from '@theme/Heading'; import styles from './styles.module.css'; -import {ThemeClassNames, useWindowSize} from '@docusaurus/theme-common'; +import { ThemeClassNames, useWindowSize } from '@docusaurus/theme-common'; import DocBreadcrumbs from '@theme/DocBreadcrumbs'; +import DocSearchWeight from '@site/src/components/docSearchWeight'; // dbt Custom import VersionContext from '../../stores/VersionContext' import getElements from '../../utils/get-html-elements'; export default function DocItem(props) { - const {content: DocContent} = props; - const {metadata, frontMatter, assets} = DocContent; + const { content: DocContent } = props; + const { metadata, frontMatter, assets } = DocContent; const { keywords, hide_title: hideTitle, @@ -32,7 +33,7 @@ export default function DocItem(props) { toc_min_heading_level: tocMinHeadingLevel, toc_max_heading_level: tocMaxHeadingLevel, } = frontMatter; - const {description, title} = metadata; + const { description, title } = metadata; const image = assets.image ?? frontMatter.image; // We only add a title if: // - user asks to hide it with front matter // - the markdown content does not already contain a top-level h1 heading @@ -49,6 +50,10 @@ export default function DocItem(props) { // If term has cta property set, show that cta const termCTA = frontMatter?.cta && frontMatter.cta + // dbt Custom + // If the page has a search_weight value, apply that value + const searchWeight = frontMatter?.search_weight && frontMatter.search_weight + // This hides any TOC items not in // html markdown headings for current version. 
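// In outline: the effect below waits for the rendered page, collects the live
// H1-H6 elements, and rebuilds the TOC from the DOM rather than from
// DocContent.toc, so headings hidden for the selected dbt version (via
// version-gated markdown) drop out of the sidebar. As written, though, the
// `found` and `!found` branches both push makeToc(item), so the
// DocContent.toc lookup no longer affects membership: every rendered heading
// lands in the rebuilt TOC.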
const { version: dbtVersion } = useContext(VersionContext) @@ -58,28 +63,51 @@ export default function DocItem(props) { async function fetchElements() { // get html elements const headings = await getElements(".markdown h1, .markdown h2, .markdown h3, .markdown h4, .markdown h5, .markdown h6") - // if headings exist on page // compare against toc - if(DocContent.toc && headings && headings.length) { - let updated = DocContent.toc.reduce((acc, item) => { + if (DocContent.toc && headings && headings.length) { + // make new TOC object + let updated = Array.from(headings).reduce((acc, item) => { // If heading id and toc item id match found // include in updated toc - let found = Array.from(headings).find(heading => + let found = DocContent.toc.find(heading => heading.id.includes(item.id) ) // If toc item is not in headings // do not include in toc // This means heading is versioned - if(found) - acc.push(item) + + let makeToc = (heading) => { + let level; + if (heading.nodeName === "H2") { + level = 2 + } else if (heading.nodeName === "H3") { + level = 3 + } else { + level = null + } + + return { + value: heading.innerHTML, + id: heading.id, + level: level && level + } + } + + if (found) { + acc.push(makeToc(item)) + } else if (!found) { + acc.push(makeToc(item)) + } else { + null + } return acc }, []) // If updated toc different than current // If so, show loader and update toc - if(currentToc.length !== updated.length) { + if (currentToc.length !== updated.length) { setTocReady(false) // This timeout provides enough time to show the loader // Otherwise the content updates immediately @@ -136,11 +164,11 @@ export default function DocItem(props) {
      {/* - Title can be declared inside md content or declared through - front matter and added manually. To make both cases consistent, - the added title is added under the same div.markdown block - See https://github.com/facebook/docusaurus/pull/4882#issuecomment-853021120 - */} + Title can be declared inside md content or declared through + front matter and added manually. To make both cases consistent, + the added title is added under the same div.markdown block + See https://github.com/facebook/docusaurus/pull/4882#issuecomment-853021120 + */} {shouldAddTitle && (
{title} @@ -148,6 +176,8 @@ export default function DocItem(props) { )} + + <DocSearchWeight weight={searchWeight} />
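For reference, a plausible sketch of the DocSearchWeight component wired in above. Its source is not part of this diff, so the shape below, including the meta name, is an assumption; the idea is simply to surface the page's search_weight front matter as metadata a search indexer can rank on.

// Hypothetical sketch of @site/src/components/docSearchWeight (assumed, not
// shown in this diff): expose the front-matter weight as a meta tag.
import React from 'react';
import Head from '@docusaurus/Head';

export default function DocSearchWeight({ weight }) {
  // Pages without search_weight front matter render nothing extra
  if (!weight) return null;
  return (
    <Head>
      {/* "docsearch:search_weight" is an assumed meta name for the indexer */}
      <meta name="docsearch:search_weight" content={String(weight)} />
    </Head>
  );
}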
      @@ -165,13 +195,14 @@ export default function DocItem(props) { maxHeadingLevel={tocMaxHeadingLevel} className={ThemeClassNames.docs.docTocDesktop} featured_cta={termCTA && termCTA} + editUrl={metadata?.editUrl && metadata.editUrl} /> ) : ( Loading )}
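Taken together, the fetchElements changes above stop trusting the compiled DocContent.toc and instead rebuild the sidebar TOC from the headings actually rendered in the DOM, so headings removed by version blocks drop out. Note that the added if (found) / else if (!found) branches both push the same entry, so the found lookup no longer filters anything; the rebuild reduces to the following sketch, assuming headings is the NodeList fetched by getElements:

// Minimal equivalent of the TOC rebuild above: map live heading elements to
// TOC entries, mirroring makeToc (only h2/h3 get a sidebar level).
const updated = Array.from(headings).map((heading) => ({
  value: heading.innerHTML,
  id: heading.id,
  level:
    heading.nodeName === 'H2' ? 2 :
    heading.nodeName === 'H3' ? 3 :
    null,
}));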
diff --git a/website/src/theme/MDXComponents/index.js b/website/src/theme/MDXComponents/index.js index 445a2321abe..c9514a320b3 100644 --- a/website/src/theme/MDXComponents/index.js +++ b/website/src/theme/MDXComponents/index.js @@ -4,12 +4,12 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ -import React, { isValidElement } from 'react'; -import Head from '@docusaurus/Head'; -import CodeBlock from '@theme/CodeBlock'; -import Heading from '@theme/Heading'; -import Details from '@theme/Details'; -import './styles.css'; // MDX elements are wrapped through the MDX pragma. In some cases (notably usage +import React, { isValidElement } from "react"; +import Head from "@docusaurus/Head"; +import CodeBlock from "@theme/CodeBlock"; +import Heading from "@theme/Heading"; +import Details from "@theme/Details"; +import "./styles.css"; // MDX elements are wrapped through the MDX pragma. In some cases (notably usage // with Head/Helmet) we need to unwrap those elements. /* @@ -33,6 +33,13 @@ import WistiaVideo from '@site/src/components/wistia'; import VersionBlock from '@site/src/components/versionBlock'; import Var from '@site/src/components/variable'; import Term from '@site/src/components/term'; +import EventsFeed from '@site/src/components/events'; +import { DiscourseFeed, DiscourseHelpFeed } from '@site/src/components/discourse'; +import Hero from '@site/src/components/hero' +import Card from '@site/src/components/card' +import Callout from '@site/src/components/callout' +import BlogPostCard from '@site/src/components/blogPostCard'; +import PostCarousel from '@site/src/components/postCarousel'; function unwrapMDXElement(element) { if (element?.props?.mdxType && element?.props?.originalType) { @@ -46,27 +53,27 @@ function unwrapMDXElement(element) { const MDXComponents = { head: (props) => { const unwrappedChildren = React.Children.map(props.children, (child) => - unwrapMDXElement(child), + unwrapMDXElement(child) ); return <Head {...props}>{unwrappedChildren}</Head>; }, code: (props) => { const inlineElements = [ - 'a', - 'b', - 'big', - 'i', - 'span', - 'em', - 'strong', - 'sup', - 'sub', - 'small', + "a", + "b", + "big", + "i", + "span", + "em", + "strong", + "sup", + "sub", + "small", ]; const shouldBeInline = React.Children.toArray(props.children).every( (el) => - (typeof el === 'string' && !el.includes('\n')) || - (React.isValidElement(el) && inlineElements.includes(el.props.mdxType)), + (typeof el === "string" && !el.includes("\n")) || + (React.isValidElement(el) && inlineElements.includes(el.props.mdxType)) ); return shouldBeInline ? <code {...props} /> : <CodeBlock {...props} />; }, @@ -74,7 +81,7 @@ const MDXComponents = { pre: (props) => ( @@ -83,7 +90,7 @@ const MDXComponents = { const items = React.Children.toArray(props.children); // Split summary item from the rest to pass it as a separate prop to the // Details theme component - const summary = items.find((item) => item?.props?.mdxType === 'summary'); + const summary = items.find((item) => item?.props?.mdxType === "summary"); const children = <>{items.filter((item) => item !== summary)}</>; return (
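The shouldBeInline check above is the piece doing real work in this hunk: a code node stays inline only when every child is a single-line string or one of the whitelisted inline elements, and is otherwise promoted to a full CodeBlock. In isolation, the same rule reads as:

// Sketch of the inline-code heuristic above, extracted as a helper.
import React from 'react';

const INLINE_ELEMENTS = ['a', 'b', 'big', 'i', 'span', 'em', 'strong', 'sup', 'sub', 'small'];

function shouldRenderInline(children) {
  return React.Children.toArray(children).every(
    (el) =>
      // plain text stays inline unless it spans multiple lines
      (typeof el === 'string' && !el.includes('\n')) ||
      // known inline MDX elements (a, em, strong, ...) stay inline too
      (React.isValidElement(el) && INLINE_ELEMENTS.includes(el.props.mdxType))
  );
}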
@@ -98,15 +105,20 @@ const MDXComponents = { h5: (props) => <Heading as="h5" {...props} />, h6: (props) => <Heading as="h6" {...props} />, + BlogPostCard: BlogPostCard, + Callout: Callout, + Card: Card, Changelog: Changelog, CloudCore: CloudCore, Collapsible: Collapsible, FAQ: FAQ, FAQList: FAQList, File: File, + Hero: Hero, Lightbox: Lightbox, Link: Link, LoomVideo: LoomVideo, + PostCarousel: PostCarousel, Tabs: Tabs, TabItem: TabItem, Snippet: Snippet, @@ -116,5 +128,9 @@ VersionBlock: VersionBlock, Var: Var, Term: Term, + EventsFeed: EventsFeed, + DiscourseFeed: DiscourseFeed, + DiscourseHelpFeed: DiscourseHelpFeed, + Card: Card, }; export default MDXComponents; diff --git a/website/src/theme/TOC/index.js b/website/src/theme/TOC/index.js index cbdbfa05f61..e3e34f6f6b7 100644 --- a/website/src/theme/TOC/index.js +++ b/website/src/theme/TOC/index.js @@ -7,6 +7,7 @@ import React from 'react'; import clsx from 'clsx'; import TOCItems from '@theme/TOCItems'; +import EditThisPage from '@theme/EditThisPage'; import styles from './styles.module.css'; // Using a custom className // This prevents TOC highlighting from highlighting TOCInline/TOCCollapsible by mistake @@ -16,14 +17,22 @@ import CTA from '../../components/cta'; const LINK_CLASS_NAME = 'table-of-contents__link toc-highlight'; const LINK_ACTIVE_CLASS_NAME = 'table-of-contents__link--active'; -function TOC({className, featured_cta, ...props}) { +function TOC({className, featured_cta, editUrl, ...props}) { + return (
    + + {editUrl && ( +
+ <EditThisPage editUrl={editUrl} />
    )} + {featured_cta && ( )} diff --git a/website/static/css/__mocks__/styleMock.js b/website/static/css/__mocks__/styleMock.js new file mode 100644 index 00000000000..f053ebf7976 --- /dev/null +++ b/website/static/css/__mocks__/styleMock.js @@ -0,0 +1 @@ +module.exports = {}; diff --git a/website/static/img/adapter-guide/0-full-release-notes.png b/website/static/img/adapter-guide/0-full-release-notes.png new file mode 100644 index 00000000000..7acb59d8ffa Binary files /dev/null and b/website/static/img/adapter-guide/0-full-release-notes.png differ diff --git a/website/static/img/adapter-guide/1-announcement.png b/website/static/img/adapter-guide/1-announcement.png new file mode 100644 index 00000000000..32f5cd6ba5d Binary files /dev/null and b/website/static/img/adapter-guide/1-announcement.png differ diff --git a/website/static/img/adapter-guide/2-short-description.png b/website/static/img/adapter-guide/2-short-description.png new file mode 100644 index 00000000000..547b856ebb0 Binary files /dev/null and b/website/static/img/adapter-guide/2-short-description.png differ diff --git a/website/static/img/adapter-guide/3-additional-resources.png b/website/static/img/adapter-guide/3-additional-resources.png new file mode 100644 index 00000000000..575157b9d54 Binary files /dev/null and b/website/static/img/adapter-guide/3-additional-resources.png differ diff --git a/website/static/img/adapter-guide/4-installation.png b/website/static/img/adapter-guide/4-installation.png new file mode 100644 index 00000000000..c728ff6952b Binary files /dev/null and b/website/static/img/adapter-guide/4-installation.png differ diff --git a/website/static/img/adapter-guide/5-coming-up.png b/website/static/img/adapter-guide/5-coming-up.png new file mode 100644 index 00000000000..4681ee87a1b Binary files /dev/null and b/website/static/img/adapter-guide/5-coming-up.png differ diff --git a/website/static/img/adapter-guide/6-thank-contribs.png b/website/static/img/adapter-guide/6-thank-contribs.png new file mode 100644 index 00000000000..b2db6df4856 Binary files /dev/null and b/website/static/img/adapter-guide/6-thank-contribs.png differ diff --git a/website/static/img/adapter-guide/adapter architecture - postgres.png b/website/static/img/adapter-guide/adapter architecture - postgres.png new file mode 100644 index 00000000000..d64dbc95026 Binary files /dev/null and b/website/static/img/adapter-guide/adapter architecture - postgres.png differ diff --git a/website/static/img/api-access-profile.png b/website/static/img/api-access-profile.png index 71a19e15ec2..deade9f2135 100644 Binary files a/website/static/img/api-access-profile.png and b/website/static/img/api-access-profile.png differ diff --git a/website/static/img/blog/2022-05-19-redshift-configurations-dbt-model-optimizations/Resolve-to-single-key.jpg b/website/static/img/blog/2022-05-19-redshift-configurations-dbt-model-optimizations/Resolve-to-single-key.jpg new file mode 100644 index 00000000000..de9f303a109 Binary files /dev/null and b/website/static/img/blog/2022-05-19-redshift-configurations-dbt-model-optimizations/Resolve-to-single-key.jpg differ diff --git a/website/static/img/blog/2022-05-19-redshift-configurations-dbt-model-optimizations/Staggered-Joins.jpg b/website/static/img/blog/2022-05-19-redshift-configurations-dbt-model-optimizations/Staggered-Joins.jpg new file mode 100644 index 00000000000..1e79fbfb983 Binary files /dev/null and b/website/static/img/blog/2022-05-19-redshift-configurations-dbt-model-optimizations/Staggered-Joins.jpg differ diff 
--git a/website/static/img/blog/2022-07-12-change-data-capture-metrics/fct-income-dag.png b/website/static/img/blog/2022-07-12-change-data-capture-metrics/fct-income-dag.png new file mode 100644 index 00000000000..861b8591568 Binary files /dev/null and b/website/static/img/blog/2022-07-12-change-data-capture-metrics/fct-income-dag.png differ diff --git a/website/static/img/blog/2022-07-12-change-data-capture-metrics/final-dag.png b/website/static/img/blog/2022-07-12-change-data-capture-metrics/final-dag.png new file mode 100644 index 00000000000..37243ff06dc Binary files /dev/null and b/website/static/img/blog/2022-07-12-change-data-capture-metrics/final-dag.png differ diff --git a/website/static/img/blog/2022-07-12-change-data-capture-metrics/income-meme.png b/website/static/img/blog/2022-07-12-change-data-capture-metrics/income-meme.png new file mode 100644 index 00000000000..4174b3fceb6 Binary files /dev/null and b/website/static/img/blog/2022-07-12-change-data-capture-metrics/income-meme.png differ diff --git a/website/static/img/blog/2022-07-12-change-data-capture-metrics/income-report-versions-dag.png b/website/static/img/blog/2022-07-12-change-data-capture-metrics/income-report-versions-dag.png new file mode 100644 index 00000000000..63eb3400ca6 Binary files /dev/null and b/website/static/img/blog/2022-07-12-change-data-capture-metrics/income-report-versions-dag.png differ diff --git a/website/static/img/blog/2022-07-12-change-data-capture-metrics/int-income-history-dag.png b/website/static/img/blog/2022-07-12-change-data-capture-metrics/int-income-history-dag.png new file mode 100644 index 00000000000..00684a680a2 Binary files /dev/null and b/website/static/img/blog/2022-07-12-change-data-capture-metrics/int-income-history-dag.png differ diff --git a/website/static/img/blog/2022-07-12-change-data-capture-metrics/snapshots-dag.png b/website/static/img/blog/2022-07-12-change-data-capture-metrics/snapshots-dag.png new file mode 100644 index 00000000000..801331f7057 Binary files /dev/null and b/website/static/img/blog/2022-07-12-change-data-capture-metrics/snapshots-dag.png differ diff --git a/website/static/img/blog/2022-07-13-star-sql-love-letter/star-is-born.jpg b/website/static/img/blog/2022-07-13-star-sql-love-letter/star-is-born.jpg new file mode 100644 index 00000000000..53192f53206 Binary files /dev/null and b/website/static/img/blog/2022-07-13-star-sql-love-letter/star-is-born.jpg differ diff --git a/website/static/img/blog/2022-07-13-star-sql-love-letter/utils-madness-1.png b/website/static/img/blog/2022-07-13-star-sql-love-letter/utils-madness-1.png new file mode 100644 index 00000000000..faf078cd2bc Binary files /dev/null and b/website/static/img/blog/2022-07-13-star-sql-love-letter/utils-madness-1.png differ diff --git a/website/static/img/blog/2022-07-13-star-sql-love-letter/utils-madness-2.png b/website/static/img/blog/2022-07-13-star-sql-love-letter/utils-madness-2.png new file mode 100644 index 00000000000..94b99f28a8c Binary files /dev/null and b/website/static/img/blog/2022-07-13-star-sql-love-letter/utils-madness-2.png differ diff --git a/website/static/img/blog/2022-07-19-migrating-from-stored-procs/dbt-approach-model.png b/website/static/img/blog/2022-07-19-migrating-from-stored-procs/dbt-approach-model.png new file mode 100644 index 00000000000..66b85b343cc Binary files /dev/null and b/website/static/img/blog/2022-07-19-migrating-from-stored-procs/dbt-approach-model.png differ diff --git 
a/website/static/img/blog/2022-07-19-migrating-from-stored-procs/dbt-diagram.png b/website/static/img/blog/2022-07-19-migrating-from-stored-procs/dbt-diagram.png new file mode 100644 index 00000000000..37d3ed910bd Binary files /dev/null and b/website/static/img/blog/2022-07-19-migrating-from-stored-procs/dbt-diagram.png differ diff --git a/website/static/img/blog/2022-07-19-migrating-from-stored-procs/stored-procs-diagram.png b/website/static/img/blog/2022-07-19-migrating-from-stored-procs/stored-procs-diagram.png new file mode 100644 index 00000000000..b24faaeab4c Binary files /dev/null and b/website/static/img/blog/2022-07-19-migrating-from-stored-procs/stored-procs-diagram.png differ diff --git a/website/static/img/blog/2022-07-26-pre-commit-dbt/checks-failed.png b/website/static/img/blog/2022-07-26-pre-commit-dbt/checks-failed.png new file mode 100644 index 00000000000..a4d116e2f1e Binary files /dev/null and b/website/static/img/blog/2022-07-26-pre-commit-dbt/checks-failed.png differ diff --git a/website/static/img/blog/2022-07-26-pre-commit-dbt/customers-model.png b/website/static/img/blog/2022-07-26-pre-commit-dbt/customers-model.png new file mode 100644 index 00000000000..a5bc90b8483 Binary files /dev/null and b/website/static/img/blog/2022-07-26-pre-commit-dbt/customers-model.png differ diff --git a/website/static/img/blog/2022-07-26-pre-commit-dbt/define-strategy.png b/website/static/img/blog/2022-07-26-pre-commit-dbt/define-strategy.png new file mode 100644 index 00000000000..29c9ec6b7ce Binary files /dev/null and b/website/static/img/blog/2022-07-26-pre-commit-dbt/define-strategy.png differ diff --git a/website/static/img/blog/2022-07-26-pre-commit-dbt/error-logs.png b/website/static/img/blog/2022-07-26-pre-commit-dbt/error-logs.png new file mode 100644 index 00000000000..e4118d11598 Binary files /dev/null and b/website/static/img/blog/2022-07-26-pre-commit-dbt/error-logs.png differ diff --git a/website/static/img/blog/2022-07-26-pre-commit-dbt/install-pre-commit.png b/website/static/img/blog/2022-07-26-pre-commit-dbt/install-pre-commit.png new file mode 100644 index 00000000000..53b80639266 Binary files /dev/null and b/website/static/img/blog/2022-07-26-pre-commit-dbt/install-pre-commit.png differ diff --git a/website/static/img/blog/2022-07-26-pre-commit-dbt/next-strategy.png b/website/static/img/blog/2022-07-26-pre-commit-dbt/next-strategy.png new file mode 100644 index 00000000000..e55aed176df Binary files /dev/null and b/website/static/img/blog/2022-07-26-pre-commit-dbt/next-strategy.png differ diff --git a/website/static/img/blog/2022-07-26-pre-commit-dbt/testing-running.png b/website/static/img/blog/2022-07-26-pre-commit-dbt/testing-running.png new file mode 100644 index 00000000000..f1980332db0 Binary files /dev/null and b/website/static/img/blog/2022-07-26-pre-commit-dbt/testing-running.png differ diff --git a/website/static/img/blog/2022-07-26-pre-commit-dbt/why-not-both-meme.png b/website/static/img/blog/2022-07-26-pre-commit-dbt/why-not-both-meme.png new file mode 100644 index 00000000000..e388c5ced80 Binary files /dev/null and b/website/static/img/blog/2022-07-26-pre-commit-dbt/why-not-both-meme.png differ diff --git a/website/static/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/crypto-meme.png b/website/static/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/crypto-meme.png new file mode 100644 index 00000000000..605d97a38e7 Binary files /dev/null and 
b/website/static/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/crypto-meme.png differ diff --git a/website/static/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/metrics-dag.png b/website/static/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/metrics-dag.png new file mode 100644 index 00000000000..71218089305 Binary files /dev/null and b/website/static/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/metrics-dag.png differ diff --git a/website/static/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/semantic-layer-description.png b/website/static/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/semantic-layer-description.png new file mode 100644 index 00000000000..b01b2b619b6 Binary files /dev/null and b/website/static/img/blog/2022-07-27-getting-started-with-the-dbt-semantic-layer/semantic-layer-description.png differ diff --git a/website/static/img/blog/2022-08-12-model-timing/model_timing_after.png b/website/static/img/blog/2022-08-12-model-timing/model_timing_after.png new file mode 100644 index 00000000000..2d9cdd12db1 Binary files /dev/null and b/website/static/img/blog/2022-08-12-model-timing/model_timing_after.png differ diff --git a/website/static/img/blog/2022-08-12-model-timing/model_timing_before.png b/website/static/img/blog/2022-08-12-model-timing/model_timing_before.png new file mode 100644 index 00000000000..d55ed331227 Binary files /dev/null and b/website/static/img/blog/2022-08-12-model-timing/model_timing_before.png differ diff --git a/website/static/img/blog/2022-08-12-model-timing/snowflake_query_plan.png b/website/static/img/blog/2022-08-12-model-timing/snowflake_query_plan.png new file mode 100644 index 00000000000..f4c4502ce05 Binary files /dev/null and b/website/static/img/blog/2022-08-12-model-timing/snowflake_query_plan.png differ diff --git a/website/static/img/blog/2022-08-22-narrative-modeling/dimensional-modeling-dag.png b/website/static/img/blog/2022-08-22-narrative-modeling/dimensional-modeling-dag.png new file mode 100644 index 00000000000..43fdef2796d Binary files /dev/null and b/website/static/img/blog/2022-08-22-narrative-modeling/dimensional-modeling-dag.png differ diff --git a/website/static/img/blog/2022-08-22-narrative-modeling/narrative-modeling-dag.png b/website/static/img/blog/2022-08-22-narrative-modeling/narrative-modeling-dag.png new file mode 100644 index 00000000000..658d59a1607 Binary files /dev/null and b/website/static/img/blog/2022-08-22-narrative-modeling/narrative-modeling-dag.png differ diff --git a/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/captain-planet-combine.gif b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/captain-planet-combine.gif new file mode 100644 index 00000000000..d37d6c4d9c0 Binary files /dev/null and b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/captain-planet-combine.gif differ diff --git a/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/crown-adjustment.gif b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/crown-adjustment.gif new file mode 100644 index 00000000000..0874301461b Binary files /dev/null and b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/crown-adjustment.gif differ diff --git 
a/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/devil-prada-glacial-pace.gif b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/devil-prada-glacial-pace.gif new file mode 100644 index 00000000000..32a1be8c683 Binary files /dev/null and b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/devil-prada-glacial-pace.gif differ diff --git a/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/hobbit-adventure.gif b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/hobbit-adventure.gif new file mode 100644 index 00000000000..842462a5e03 Binary files /dev/null and b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/hobbit-adventure.gif differ diff --git a/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/office-its-a-date.gif b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/office-its-a-date.gif new file mode 100644 index 00000000000..a075798764b Binary files /dev/null and b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/office-its-a-date.gif differ diff --git a/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/parks-and-rec-ben-wyatt.gif b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/parks-and-rec-ben-wyatt.gif new file mode 100644 index 00000000000..4c481bb9ce4 Binary files /dev/null and b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/parks-and-rec-ben-wyatt.gif differ diff --git a/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/supranos-bada-bing.gif b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/supranos-bada-bing.gif new file mode 100644 index 00000000000..4e20e2bbad6 Binary files /dev/null and b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/supranos-bada-bing.gif differ diff --git a/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/supranos-boom.gif b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/supranos-boom.gif new file mode 100644 index 00000000000..328dddea3f6 Binary files /dev/null and b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/supranos-boom.gif differ diff --git a/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/top-gun-classified.gif b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/top-gun-classified.gif new file mode 100644 index 00000000000..b4bcaab77fa Binary files /dev/null and b/website/static/img/blog/2022-09-07-leverage-accounting-principles-when-financial-modeling/top-gun-classified.gif differ diff --git a/website/static/img/blog/2022-09-08-konmari-your-query-migration/buried-in-boxes.webp b/website/static/img/blog/2022-09-08-konmari-your-query-migration/buried-in-boxes.webp new file mode 100644 index 00000000000..5833eaa437d Binary files /dev/null and b/website/static/img/blog/2022-09-08-konmari-your-query-migration/buried-in-boxes.webp differ diff --git a/website/static/img/blog/2022-09-08-konmari-your-query-migration/cat_kitchen.jpeg 
b/website/static/img/blog/2022-09-08-konmari-your-query-migration/cat_kitchen.jpeg new file mode 100644 index 00000000000..de74601dce2 Binary files /dev/null and b/website/static/img/blog/2022-09-08-konmari-your-query-migration/cat_kitchen.jpeg differ diff --git a/website/static/img/blog/2022-09-08-konmari-your-query-migration/fully-konmarid-project.png b/website/static/img/blog/2022-09-08-konmari-your-query-migration/fully-konmarid-project.png new file mode 100644 index 00000000000..f89659f26a0 Binary files /dev/null and b/website/static/img/blog/2022-09-08-konmari-your-query-migration/fully-konmarid-project.png differ diff --git a/website/static/img/blog/2022-09-08-konmari-your-query-migration/many-to-one-dag.png b/website/static/img/blog/2022-09-08-konmari-your-query-migration/many-to-one-dag.png new file mode 100644 index 00000000000..5b8240b58d8 Binary files /dev/null and b/website/static/img/blog/2022-09-08-konmari-your-query-migration/many-to-one-dag.png differ diff --git a/website/static/img/blog/2022-09-08-konmari-your-query-migration/mariekondo.gif b/website/static/img/blog/2022-09-08-konmari-your-query-migration/mariekondo.gif new file mode 100644 index 00000000000..7ff20dd33b6 Binary files /dev/null and b/website/static/img/blog/2022-09-08-konmari-your-query-migration/mariekondo.gif differ diff --git a/website/static/img/blog/2022-09-08-konmari-your-query-migration/nachka-cat.gif b/website/static/img/blog/2022-09-08-konmari-your-query-migration/nachka-cat.gif new file mode 100644 index 00000000000..ab69c07942f Binary files /dev/null and b/website/static/img/blog/2022-09-08-konmari-your-query-migration/nachka-cat.gif differ diff --git a/website/static/img/blog/2022-09-08-konmari-your-query-migration/spaghetti-data-dag.png b/website/static/img/blog/2022-09-08-konmari-your-query-migration/spaghetti-data-dag.png new file mode 100644 index 00000000000..e7ef3a5f0ea Binary files /dev/null and b/website/static/img/blog/2022-09-08-konmari-your-query-migration/spaghetti-data-dag.png differ diff --git a/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/1_basic_git_workflow.png b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/1_basic_git_workflow.png new file mode 100644 index 00000000000..a734fb92f9c Binary files /dev/null and b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/1_basic_git_workflow.png differ diff --git a/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/2_multienvironment.png b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/2_multienvironment.png new file mode 100644 index 00000000000..f3bd1a71aeb Binary files /dev/null and b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/2_multienvironment.png differ diff --git a/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/3_gru.jpg b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/3_gru.jpg new file mode 100644 index 00000000000..2d330a868cc Binary files /dev/null and b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/3_gru.jpg differ diff --git a/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/4_scenario_1.png b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/4_scenario_1.png new file mode 100644 index 00000000000..257ce91b010 Binary files /dev/null and b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/4_scenario_1.png differ diff --git 
a/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/5_scenario_2.png b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/5_scenario_2.png new file mode 100644 index 00000000000..4b381c26155 Binary files /dev/null and b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/5_scenario_2.png differ diff --git a/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/6_scenario_3.png b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/6_scenario_3.png new file mode 100644 index 00000000000..35d6cd99d8f Binary files /dev/null and b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/6_scenario_3.png differ diff --git a/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/7_bernie.jpg b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/7_bernie.jpg new file mode 100644 index 00000000000..f849471786e Binary files /dev/null and b/website/static/img/blog/2022-09-13-the-case-against-cherry-picking/7_bernie.jpg differ diff --git a/website/static/img/blog/2022-09-28-analyst-to-ae/first_loop.png b/website/static/img/blog/2022-09-28-analyst-to-ae/first_loop.png new file mode 100644 index 00000000000..a5ab1b0f89c Binary files /dev/null and b/website/static/img/blog/2022-09-28-analyst-to-ae/first_loop.png differ diff --git a/website/static/img/blog/2022-09-28-analyst-to-ae/multiple_loops.png b/website/static/img/blog/2022-09-28-analyst-to-ae/multiple_loops.png new file mode 100644 index 00000000000..e37a0d8dd21 Binary files /dev/null and b/website/static/img/blog/2022-09-28-analyst-to-ae/multiple_loops.png differ diff --git a/website/static/img/blog/2022-09-28-analyst-to-ae/new_workflow.png b/website/static/img/blog/2022-09-28-analyst-to-ae/new_workflow.png new file mode 100644 index 00000000000..e2ddb7da352 Binary files /dev/null and b/website/static/img/blog/2022-09-28-analyst-to-ae/new_workflow.png differ diff --git a/website/static/img/blog/2022-09-28-analyst-to-ae/old_workflow.png b/website/static/img/blog/2022-09-28-analyst-to-ae/old_workflow.png new file mode 100644 index 00000000000..a2e10841b6a Binary files /dev/null and b/website/static/img/blog/2022-09-28-analyst-to-ae/old_workflow.png differ diff --git a/website/static/img/blog/2022-10-24-demystifying-event-streams/kafka-topic-table.png b/website/static/img/blog/2022-10-24-demystifying-event-streams/kafka-topic-table.png new file mode 100644 index 00000000000..adb01182221 Binary files /dev/null and b/website/static/img/blog/2022-10-24-demystifying-event-streams/kafka-topic-table.png differ diff --git a/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform-kafka-load.png b/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform-kafka-load.png new file mode 100644 index 00000000000..c0377b51084 Binary files /dev/null and b/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform-kafka-load.png differ diff --git a/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform-kafka.png b/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform-kafka.png new file mode 100644 index 00000000000..abea9093775 Binary files /dev/null and b/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform-kafka.png differ diff --git a/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform-stitch.png b/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform-stitch.png new file mode 
100644 index 00000000000..74d6a2e3c4a Binary files /dev/null and b/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform-stitch.png differ diff --git a/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform.png b/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform.png new file mode 100644 index 00000000000..70f947d830b Binary files /dev/null and b/website/static/img/blog/2022-10-24-demystifying-event-streams/merit-platform.png differ diff --git a/website/static/img/blog/2022-10-24-demystifying-event-streams/omg-contract.png b/website/static/img/blog/2022-10-24-demystifying-event-streams/omg-contract.png new file mode 100644 index 00000000000..9d56d148462 Binary files /dev/null and b/website/static/img/blog/2022-10-24-demystifying-event-streams/omg-contract.png differ diff --git a/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/fivetran-uploader-1.png b/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/fivetran-uploader-1.png new file mode 100644 index 00000000000..f92bf116ef6 Binary files /dev/null and b/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/fivetran-uploader-1.png differ diff --git a/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/fivetran-uploader-2.png b/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/fivetran-uploader-2.png new file mode 100644 index 00000000000..ae836f82424 Binary files /dev/null and b/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/fivetran-uploader-2.png differ diff --git a/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/google-drive-uploader.png b/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/google-drive-uploader.png new file mode 100644 index 00000000000..bfd0dd4bb50 Binary files /dev/null and b/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/google-drive-uploader.png differ diff --git a/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/google-sheets-uploader.png b/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/google-sheets-uploader.png new file mode 100644 index 00000000000..188c0f8125b Binary files /dev/null and b/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/google-sheets-uploader.png differ diff --git a/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/snowflake-uploader.png b/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/snowflake-uploader.png new file mode 100644 index 00000000000..8b598ac1d14 Binary files /dev/null and b/website/static/img/blog/2022-11-22-move-spreadsheets-to-your-dwh/snowflake-uploader.png differ diff --git a/website/static/img/blog/2022-11-30-dbt-project-evaluator/grace_at_coalesce.png b/website/static/img/blog/2022-11-30-dbt-project-evaluator/grace_at_coalesce.png new file mode 100644 index 00000000000..11e2e0381ff Binary files /dev/null and b/website/static/img/blog/2022-11-30-dbt-project-evaluator/grace_at_coalesce.png differ diff --git a/website/static/img/blog/2022-11-30-dbt-project-evaluator/proserv_aliens.png b/website/static/img/blog/2022-11-30-dbt-project-evaluator/proserv_aliens.png new file mode 100644 index 00000000000..3cee19173e9 Binary files /dev/null and b/website/static/img/blog/2022-11-30-dbt-project-evaluator/proserv_aliens.png differ diff --git a/website/static/img/blog/authors/barr-yaron.png b/website/static/img/blog/authors/barr-yaron.png new file mode 100644 index 00000000000..558bb00c8d8 Binary files 
/dev/null and b/website/static/img/blog/authors/barr-yaron.png differ diff --git a/website/static/img/blog/authors/bennie-regenold.png b/website/static/img/blog/authors/bennie-regenold.png new file mode 100644 index 00000000000..ce74b1de9f9 Binary files /dev/null and b/website/static/img/blog/authors/bennie-regenold.png differ diff --git a/website/static/img/blog/authors/benoit-perigaud.jpeg b/website/static/img/blog/authors/benoit-perigaud.jpeg new file mode 100644 index 00000000000..e4106464475 Binary files /dev/null and b/website/static/img/blog/authors/benoit-perigaud.jpeg differ diff --git a/website/static/img/blog/authors/brittany-krauth.png b/website/static/img/blog/authors/brittany-krauth.png new file mode 100644 index 00000000000..da413820fbc Binary files /dev/null and b/website/static/img/blog/authors/brittany-krauth.png differ diff --git a/website/static/img/blog/authors/callum-mccann.jpg b/website/static/img/blog/authors/callum-mccann.jpg new file mode 100644 index 00000000000..3dd05c6cb90 Binary files /dev/null and b/website/static/img/blog/authors/callum-mccann.jpg differ diff --git a/website/static/img/blog/authors/charlie-summers.jpeg b/website/static/img/blog/authors/charlie-summers.jpeg new file mode 100644 index 00000000000..9c63c89154b Binary files /dev/null and b/website/static/img/blog/authors/charlie-summers.jpeg differ diff --git a/website/static/img/blog/authors/dbeatty.jpeg b/website/static/img/blog/authors/dbeatty.jpeg new file mode 100644 index 00000000000..34c027ea308 Binary files /dev/null and b/website/static/img/blog/authors/dbeatty.jpeg differ diff --git a/website/static/img/blog/authors/grace-goheen.jpeg b/website/static/img/blog/authors/grace-goheen.jpeg new file mode 100644 index 00000000000..06b059b08cb Binary files /dev/null and b/website/static/img/blog/authors/grace-goheen.jpeg differ diff --git a/website/static/img/blog/authors/ian-fahey.png b/website/static/img/blog/authors/ian-fahey.png new file mode 100644 index 00000000000..5d2bd2a0ff4 Binary files /dev/null and b/website/static/img/blog/authors/ian-fahey.png differ diff --git a/website/static/img/blog/authors/jerco.jpeg b/website/static/img/blog/authors/jerco.jpeg new file mode 100644 index 00000000000..c19b58bcd61 Binary files /dev/null and b/website/static/img/blog/authors/jerco.jpeg differ diff --git a/website/static/img/blog/authors/joe-markiewicz.jpeg b/website/static/img/blog/authors/joe-markiewicz.jpeg new file mode 100644 index 00000000000..0531255fbda Binary files /dev/null and b/website/static/img/blog/authors/joe-markiewicz.jpeg differ diff --git a/website/static/img/blog/authors/jonathan-natkins.jpeg b/website/static/img/blog/authors/jonathan-natkins.jpeg new file mode 100644 index 00000000000..6eb69ca30ce Binary files /dev/null and b/website/static/img/blog/authors/jonathan-natkins.jpeg differ diff --git a/website/static/img/blog/authors/wasila-quader.png b/website/static/img/blog/authors/wasila-quader.png new file mode 100644 index 00000000000..1ccfbfac7e2 Binary files /dev/null and b/website/static/img/blog/authors/wasila-quader.png differ diff --git a/website/static/img/blog/authors/yu-ishikawa.jpg b/website/static/img/blog/authors/yu-ishikawa.jpg new file mode 100644 index 00000000000..d236b8403f9 Binary files /dev/null and b/website/static/img/blog/authors/yu-ishikawa.jpg differ diff --git a/website/static/img/create-branch-new-ide.png b/website/static/img/create-branch-new-ide.png new file mode 100644 index 00000000000..081e5de8dda Binary files /dev/null and 
b/website/static/img/create-branch-new-ide.png differ diff --git a/website/static/img/databricks_tutorial/images/dbt_cloud_setup_databricks_connection_details.png b/website/static/img/databricks_tutorial/images/dbt_cloud_setup_databricks_connection_details.png new file mode 100644 index 00000000000..7ff30872d49 Binary files /dev/null and b/website/static/img/databricks_tutorial/images/dbt_cloud_setup_databricks_connection_details.png differ diff --git a/website/static/img/databricks_tutorial/images/dbt_cloud_setup_databricks_connection_start.png b/website/static/img/databricks_tutorial/images/dbt_cloud_setup_databricks_connection_start.png new file mode 100644 index 00000000000..bd637493e73 Binary files /dev/null and b/website/static/img/databricks_tutorial/images/dbt_cloud_setup_databricks_connection_start.png differ diff --git a/website/static/img/databricks_tutorial/images/setup_databricks_connect.png b/website/static/img/databricks_tutorial/images/setup_databricks_connect.png deleted file mode 100644 index af4ae8d3a25..00000000000 Binary files a/website/static/img/databricks_tutorial/images/setup_databricks_connect.png and /dev/null differ diff --git a/website/static/img/dbt-cloud-project-setup-flow-next.png b/website/static/img/dbt-cloud-project-setup-flow-next.png new file mode 100644 index 00000000000..660e8ae446a Binary files /dev/null and b/website/static/img/dbt-cloud-project-setup-flow-next.png differ diff --git a/website/static/img/dbt-cloud-project-setup-flow.png b/website/static/img/dbt-cloud-project-setup-flow.png deleted file mode 100644 index afd77ac9c01..00000000000 Binary files a/website/static/img/dbt-cloud-project-setup-flow.png and /dev/null differ diff --git a/website/static/img/docs/building-a-dbt-project/building-models/python-models/dataproc-connector-initialization.png b/website/static/img/docs/building-a-dbt-project/building-models/python-models/dataproc-connector-initialization.png new file mode 100644 index 00000000000..4c6095372f1 Binary files /dev/null and b/website/static/img/docs/building-a-dbt-project/building-models/python-models/dataproc-connector-initialization.png differ diff --git a/website/static/img/docs/building-a-dbt-project/building-models/python-models/dataproc-pip-packages.png b/website/static/img/docs/building-a-dbt-project/building-models/python-models/dataproc-pip-packages.png new file mode 100644 index 00000000000..d2c87003042 Binary files /dev/null and b/website/static/img/docs/building-a-dbt-project/building-models/python-models/dataproc-pip-packages.png differ diff --git a/website/static/img/docs/building-a-dbt-project/building-models/python-models/python-model-dag.png b/website/static/img/docs/building-a-dbt-project/building-models/python-models/python-model-dag.png new file mode 100644 index 00000000000..f89070468d1 Binary files /dev/null and b/website/static/img/docs/building-a-dbt-project/building-models/python-models/python-model-dag.png differ diff --git a/website/static/img/docs/building-a-dbt-project/dbt-cloud-project-setup-flow-next.png b/website/static/img/docs/building-a-dbt-project/dbt-cloud-project-setup-flow-next.png new file mode 100644 index 00000000000..660e8ae446a Binary files /dev/null and b/website/static/img/docs/building-a-dbt-project/dbt-cloud-project-setup-flow-next.png differ diff --git a/website/static/img/docs/dbt-cloud/28f97e6-job-schedule.gif b/website/static/img/docs/dbt-cloud/28f97e6-job-schedule.gif deleted file mode 100644 index 8d6cd95defc..00000000000 Binary files 
a/website/static/img/docs/dbt-cloud/28f97e6-job-schedule.gif and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/Allow-dbt-to-access-slack.png b/website/static/img/docs/dbt-cloud/Allow-dbt-to-access-slack.png new file mode 100644 index 00000000000..2e6b198959c Binary files /dev/null and b/website/static/img/docs/dbt-cloud/Allow-dbt-to-access-slack.png differ diff --git a/website/static/img/docs/dbt-cloud/Configure-SSO-Access.png b/website/static/img/docs/dbt-cloud/Configure-SSO-Access.png new file mode 100644 index 00000000000..1c1f88b9298 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/Configure-SSO-Access.png differ diff --git a/website/static/img/docs/dbt-cloud/Configure-Slack-notifications.png b/website/static/img/docs/dbt-cloud/Configure-Slack-notifications.png new file mode 100644 index 00000000000..5a76792f649 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/Configure-Slack-notifications.png differ diff --git a/website/static/img/docs/dbt-cloud/Fix Session Timeout.png b/website/static/img/docs/dbt-cloud/Fix Session Timeout.png new file mode 100644 index 00000000000..a8b7079b427 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/Fix Session Timeout.png differ diff --git a/website/static/img/docs/dbt-cloud/Link-your-Slack-Profile.png b/website/static/img/docs/dbt-cloud/Link-your-Slack-Profile.png new file mode 100644 index 00000000000..ab2697ec6ce Binary files /dev/null and b/website/static/img/docs/dbt-cloud/Link-your-Slack-Profile.png differ diff --git a/website/static/img/docs/dbt-cloud/Model-timing-tab.png b/website/static/img/docs/dbt-cloud/Model-timing-tab.png new file mode 100644 index 00000000000..dbe76f76a0d Binary files /dev/null and b/website/static/img/docs/dbt-cloud/Model-timing-tab.png differ diff --git a/website/static/img/docs/dbt-cloud/Navigate-to-integrations.png b/website/static/img/docs/dbt-cloud/Navigate-to-integrations.png new file mode 100644 index 00000000000..b1e2789f334 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/Navigate-to-integrations.png differ diff --git a/website/static/img/docs/dbt-cloud/Navigate-to-notifications.png b/website/static/img/docs/dbt-cloud/Navigate-to-notifications.png new file mode 100644 index 00000000000..13da17d5deb Binary files /dev/null and b/website/static/img/docs/dbt-cloud/Navigate-to-notifications.png differ diff --git a/website/static/img/docs/dbt-cloud/Save-Group-Information.png b/website/static/img/docs/dbt-cloud/Save-Group-Information.png new file mode 100644 index 00000000000..4ef53211fa0 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/Save-Group-Information.png differ diff --git a/website/static/img/docs/dbt-cloud/Select-Groups-RBAC.png b/website/static/img/docs/dbt-cloud/Select-Groups-RBAC.png new file mode 100644 index 00000000000..99bfa215d3a Binary files /dev/null and b/website/static/img/docs/dbt-cloud/Select-Groups-RBAC.png differ diff --git a/website/static/img/docs/dbt-cloud/View-docs-in-IDE.png b/website/static/img/docs/dbt-cloud/View-docs-in-IDE.png new file mode 100644 index 00000000000..9f9d3a34154 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/View-docs-in-IDE.png differ diff --git a/website/static/img/docs/dbt-cloud/access-control/license-manual.png b/website/static/img/docs/dbt-cloud/access-control/license-manual.png index ce94b8ca785..d92f5f77eb9 100644 Binary files a/website/static/img/docs/dbt-cloud/access-control/license-manual.png and 
b/website/static/img/docs/dbt-cloud/access-control/license-manual.png differ diff --git a/website/static/img/docs/dbt-cloud/access-control/license-mapping.png b/website/static/img/docs/dbt-cloud/access-control/license-mapping.png index e0bee33740e..b483c760d70 100644 Binary files a/website/static/img/docs/dbt-cloud/access-control/license-mapping.png and b/website/static/img/docs/dbt-cloud/access-control/license-mapping.png differ diff --git a/website/static/img/docs/dbt-cloud/b4f242f-view-docs.gif b/website/static/img/docs/dbt-cloud/b4f242f-view-docs.gif deleted file mode 100644 index 370eb048a95..00000000000 Binary files a/website/static/img/docs/dbt-cloud/b4f242f-view-docs.gif and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/c3fe800-Screen_Shot_2019-02-08_at_6.53.29_PM.png b/website/static/img/docs/dbt-cloud/c3fe800-Screen_Shot_2019-02-08_at_6.53.29_PM.png deleted file mode 100644 index 04e0084dbb0..00000000000 Binary files a/website/static/img/docs/dbt-cloud/c3fe800-Screen_Shot_2019-02-08_at_6.53.29_PM.png and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/bigquery-connection.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/bigquery-connection.png index 6dc86fa8ddc..32a92486b3a 100644 Binary files a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/bigquery-connection.png and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/bigquery-connection.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/Environment-settings.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/Environment-settings.png new file mode 100644 index 00000000000..cee053bf983 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/Environment-settings.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/job-settings.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/job-settings.png new file mode 100644 index 00000000000..2c7b2a7b560 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/job-settings.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions/upgrade-environment.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions/upgrade-environment.png new file mode 100644 index 00000000000..026dbcdff74 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions/upgrade-environment.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/configure-template-url-new-ide.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/configure-template-url-new-ide.png new file mode 100644 index 00000000000..c992eae4f86 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/configure-template-url-new-ide.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/configure-github.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/configure-github.png new file mode 100644 index 00000000000..ead2e76a994 Binary files /dev/null and 
b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/configure-github.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/github-app-install.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/github-app-install.png new file mode 100644 index 00000000000..442b6c18781 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/github-app-install.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/github-auth.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/github-auth.png new file mode 100644 index 00000000000..2c9fb7032d5 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/github-auth.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/github-connect.gif b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/github-connect.gif new file mode 100644 index 00000000000..d29ed18db15 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-github/github-connect.gif differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/databricks-connections.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/databricks-connections.png new file mode 100644 index 00000000000..d028ae0f06c Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/databricks-connections.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/dbt-databricks.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/dbt-databricks.png new file mode 100644 index 00000000000..23129fcc06f Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/dbt-databricks.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/dbt-spark.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/dbt-spark.png new file mode 100644 index 00000000000..0584337ece4 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/dbt-spark.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/dev-environment-custom-branch.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/dev-environment-custom-branch.png new file mode 100644 index 00000000000..487188b1c88 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/dev-environment-custom-branch.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/managed-repo.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/managed-repo.png index 7b50cbb00cd..5f1cb58461e 100644 Binary files a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/managed-repo.png and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/managed-repo.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/postgres-redshift-connection.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/postgres-redshift-connection.png index 3f82bd5d5c6..bc89b429ad4 100644 Binary files a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/postgres-redshift-connection.png and 
b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/postgres-redshift-connection.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/postgres-redshift-ssh-tunnel.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/postgres-redshift-ssh-tunnel.png index beeda17947b..04461afa9ed 100644 Binary files a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/postgres-redshift-ssh-tunnel.png and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/postgres-redshift-ssh-tunnel.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/spark-connection.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/spark-connection.png index c54f48c9175..4752e1d565d 100644 Binary files a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/spark-connection.png and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/spark-connection.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/breadcrumbs.png b/website/static/img/docs/dbt-cloud/cloud-ide/breadcrumbs.png new file mode 100644 index 00000000000..eef522a742d Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/breadcrumbs.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/build.png b/website/static/img/docs/dbt-cloud/cloud-ide/build.png new file mode 100644 index 00000000000..c21ebead49d Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/build.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/command_bar.png b/website/static/img/docs/dbt-cloud/cloud-ide/command_bar.png new file mode 100644 index 00000000000..5a2a67a7303 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/command_bar.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/create-new.png b/website/static/img/docs/dbt-cloud/cloud-ide/create-new.png new file mode 100644 index 00000000000..f40a554f341 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/create-new.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/credentials.png b/website/static/img/docs/dbt-cloud/cloud-ide/credentials.png new file mode 100644 index 00000000000..7afb78409e1 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/credentials.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/file-explorer.png b/website/static/img/docs/dbt-cloud/cloud-ide/file-explorer.png new file mode 100644 index 00000000000..bbf1fcc0427 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/file-explorer.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/git-overview.png b/website/static/img/docs/dbt-cloud/cloud-ide/git-overview.png new file mode 100644 index 00000000000..eae4da2da85 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/git-overview.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/lineage.png b/website/static/img/docs/dbt-cloud/cloud-ide/lineage.png new file mode 100644 index 00000000000..a4293bbcaf3 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/lineage.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/multi-selector.gif b/website/static/img/docs/dbt-cloud/cloud-ide/multi-selector.gif new file mode 100644 index 00000000000..9159ed61c17 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/multi-selector.gif differ diff --git 
a/website/static/img/docs/dbt-cloud/cloud-ide/new-environment.png b/website/static/img/docs/dbt-cloud/cloud-ide/new-environment.png new file mode 100644 index 00000000000..6998cc3ce9c Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/new-environment.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/status-icon.png b/website/static/img/docs/dbt-cloud/cloud-ide/status-icon.png new file mode 100644 index 00000000000..129986a1e60 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/status-icon.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/tab-options.png b/website/static/img/docs/dbt-cloud/cloud-ide/tab-options.png new file mode 100644 index 00000000000..0af73130cf3 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/tab-options.png differ diff --git a/website/static/img/docs/dbt-cloud/cloud-ide/view-docs.png b/website/static/img/docs/dbt-cloud/cloud-ide/view-docs.png new file mode 100644 index 00000000000..9c7c9312fa4 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-ide/view-docs.png differ diff --git a/website/static/img/docs/dbt-cloud/connecting-azure-devops/Azure Devops App in dbt Cloud.gif b/website/static/img/docs/dbt-cloud/connecting-azure-devops/Azure Devops App in dbt Cloud.gif deleted file mode 100644 index bf319c1f83f..00000000000 Binary files a/website/static/img/docs/dbt-cloud/connecting-azure-devops/Azure Devops App in dbt Cloud.gif and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/connecting-azure-devops/AzureDevopsAppdbtCloud.gif b/website/static/img/docs/dbt-cloud/connecting-azure-devops/AzureDevopsAppdbtCloud.gif new file mode 100644 index 00000000000..0c68cc06f35 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/connecting-azure-devops/AzureDevopsAppdbtCloud.gif differ diff --git a/website/static/img/docs/dbt-cloud/connecting-azure-devops/LinktoAzure.png b/website/static/img/docs/dbt-cloud/connecting-azure-devops/LinktoAzure.png new file mode 100644 index 00000000000..143d4127aec Binary files /dev/null and b/website/static/img/docs/dbt-cloud/connecting-azure-devops/LinktoAzure.png differ diff --git a/website/static/img/docs/dbt-cloud/connecting-azure-devops/azure-service-user.png b/website/static/img/docs/dbt-cloud/connecting-azure-devops/azure-service-user.png index cbd683fbde1..20277967f22 100644 Binary files a/website/static/img/docs/dbt-cloud/connecting-azure-devops/azure-service-user.png and b/website/static/img/docs/dbt-cloud/connecting-azure-devops/azure-service-user.png differ diff --git a/website/static/img/docs/dbt-cloud/connecting-gitlab/GitLab-Navigation.gif b/website/static/img/docs/dbt-cloud/connecting-gitlab/GitLab-Navigation.gif new file mode 100644 index 00000000000..a9d624ae6e3 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/connecting-gitlab/GitLab-Navigation.gif differ diff --git a/website/static/img/docs/dbt-cloud/connecting-gitlab/gitlab app.png b/website/static/img/docs/dbt-cloud/connecting-gitlab/gitlab app.png new file mode 100644 index 00000000000..e90c6b08148 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/connecting-gitlab/gitlab app.png differ diff --git a/website/static/img/docs/dbt-cloud/connecting-gitlab/gitlab nav.gif b/website/static/img/docs/dbt-cloud/connecting-gitlab/gitlab nav.gif new file mode 100644 index 00000000000..419ce38bc3c Binary files /dev/null and b/website/static/img/docs/dbt-cloud/connecting-gitlab/gitlab nav.gif differ diff --git 
a/website/static/img/docs/dbt-cloud/connecting-gitlab/gitlab redirect.gif b/website/static/img/docs/dbt-cloud/connecting-gitlab/gitlab redirect.gif new file mode 100644 index 00000000000..c153b64cf88 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/connecting-gitlab/gitlab redirect.gif differ diff --git a/website/static/img/docs/dbt-cloud/dag v1.1.56 release.png b/website/static/img/docs/dbt-cloud/dag v1.1.56 release.png new file mode 100644 index 00000000000..1066ff22d5b Binary files /dev/null and b/website/static/img/docs/dbt-cloud/dag v1.1.56 release.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt docs generate command.png b/website/static/img/docs/dbt-cloud/dbt docs generate command.png new file mode 100644 index 00000000000..2e9bc318dc5 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/dbt docs generate command.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/1bd0c42-Screen_Shot_2020-03-10_at_6.20.05_PM.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/1bd0c42-Screen_Shot_2020-03-10_at_6.20.05_PM.png deleted file mode 100644 index ffd2bb0d201..00000000000 Binary files a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/1bd0c42-Screen_Shot_2020-03-10_at_6.20.05_PM.png and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/84427818-841b3680-abf3-11ea-8faf-693d4a39cffb.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/84427818-841b3680-abf3-11ea-8faf-693d4a39cffb.png new file mode 100644 index 00000000000..062ad9f7fb4 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/84427818-841b3680-abf3-11ea-8faf-693d4a39cffb.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-detail.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-detail.png deleted file mode 100644 index 93b10ac1e09..00000000000 Binary files a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-detail.png and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-list.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-list.png index c741aec4307..28262bd5635 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-list.png and b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-list.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-permissions.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-permissions.png index 285f4d2c1d1..55655ef6b78 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-permissions.png and b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/access-control/group-permissions.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/ae2045a-Screen_Shot_2019-04-25_at_6.05.12_PM.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/ae2045a-Screen_Shot_2019-04-25_at_6.05.12_PM.png deleted file mode 100644 index a5303eb9695..00000000000 Binary files a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/ae2045a-Screen_Shot_2019-04-25_at_6.05.12_PM.png and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/audit-log-menu.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/audit-log-menu.png index 
58d5fbb1022..a5010c50ef3 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/audit-log-menu.png and b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/audit-log-menu.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/audit-log-section.jpg b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/audit-log-section.jpg new file mode 100644 index 00000000000..327b51f8988 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/audit-log-section.jpg differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/azure/azure-cloud-sso.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/azure/azure-cloud-sso.png index 17753696e61..ea8d2a985f1 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/azure/azure-cloud-sso.png and b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/azure/azure-cloud-sso.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/database-connection-snowflake-oauth.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/database-connection-snowflake-oauth.png new file mode 100644 index 00000000000..4c638eed6ae Binary files /dev/null and b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/database-connection-snowflake-oauth.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/gsuite/gsuite-sso-cloud-config.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/gsuite/gsuite-sso-cloud-config.png index 91d097c7025..3e7bde2651d 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/gsuite/gsuite-sso-cloud-config.png and b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/gsuite/gsuite-sso-cloud-config.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/gsuite/gsuite-sso-cloud-verify.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/gsuite/gsuite-sso-cloud-verify.png index f328ae7c375..8636cbd7186 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/gsuite/gsuite-sso-cloud-verify.png and b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/gsuite/gsuite-sso-cloud-verify.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/okta/okta-6-setup-integration.png b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/okta/okta-6-setup-integration.png index 917287e5eaf..acf8395663f 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/okta/okta-6-setup-integration.png and b/website/static/img/docs/dbt-cloud/dbt-cloud-enterprise/okta/okta-6-setup-integration.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-docs-generate-command.png b/website/static/img/docs/dbt-cloud/dbt-docs-generate-command.png new file mode 100644 index 00000000000..2e9bc318dc5 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/dbt-docs-generate-command.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-quickstart-connection.png b/website/static/img/docs/dbt-cloud/dbt-quickstart-connection.png index dc597500c86..bc49b6c9400 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-quickstart-connection.png and b/website/static/img/docs/dbt-cloud/dbt-quickstart-connection.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-quickstart-environment.png b/website/static/img/docs/dbt-cloud/dbt-quickstart-environment.png index 596e99209a0..565fd248815 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-quickstart-environment.png and 
b/website/static/img/docs/dbt-cloud/dbt-quickstart-environment.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-quickstart-new-job-schedule.png b/website/static/img/docs/dbt-cloud/dbt-quickstart-new-job-schedule.png index f5ebcd82189..22838440554 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-quickstart-new-job-schedule.png and b/website/static/img/docs/dbt-cloud/dbt-quickstart-new-job-schedule.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-quickstart-new-job.png b/website/static/img/docs/dbt-cloud/dbt-quickstart-new-job.png index c182c002d59..bae0b6f486d 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-quickstart-new-job.png and b/website/static/img/docs/dbt-cloud/dbt-quickstart-new-job.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-quickstart-repository.png b/website/static/img/docs/dbt-cloud/dbt-quickstart-repository.png index a395b7e9592..c015709d7b4 100644 Binary files a/website/static/img/docs/dbt-cloud/dbt-quickstart-repository.png and b/website/static/img/docs/dbt-cloud/dbt-quickstart-repository.png differ diff --git a/website/static/img/docs/dbt-cloud/dbt-run-logs.png b/website/static/img/docs/dbt-cloud/dbt-run-logs.png new file mode 100644 index 00000000000..f9886ccba4f Binary files /dev/null and b/website/static/img/docs/dbt-cloud/dbt-run-logs.png differ diff --git a/website/static/img/docs/dbt-cloud/overview-job-schedule.gif b/website/static/img/docs/dbt-cloud/overview-job-schedule.gif new file mode 100644 index 00000000000..c9848e5d654 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/overview-job-schedule.gif differ diff --git a/website/static/img/docs/dbt-cloud/refresh-ide/Git overview.png b/website/static/img/docs/dbt-cloud/refresh-ide/Git overview.png new file mode 100644 index 00000000000..6f3113c7959 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/refresh-ide/Git overview.png differ diff --git a/website/static/img/docs/dbt-cloud/refresh-ide/building.gif b/website/static/img/docs/dbt-cloud/refresh-ide/building.gif new file mode 100644 index 00000000000..b521eaa18d5 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/refresh-ide/building.gif differ diff --git a/website/static/img/docs/dbt-cloud/refresh-ide/dev-credentials.png b/website/static/img/docs/dbt-cloud/refresh-ide/dev-credentials.png new file mode 100644 index 00000000000..b73ca427a8b Binary files /dev/null and b/website/static/img/docs/dbt-cloud/refresh-ide/dev-credentials.png differ diff --git a/website/static/img/docs/dbt-cloud/refresh-ide/new-environment-fields.png b/website/static/img/docs/dbt-cloud/refresh-ide/new-environment-fields.png new file mode 100644 index 00000000000..886e69fb10b Binary files /dev/null and b/website/static/img/docs/dbt-cloud/refresh-ide/new-environment-fields.png differ diff --git a/website/static/img/docs/dbt-cloud/refresh-ide/new-environment.png b/website/static/img/docs/dbt-cloud/refresh-ide/new-environment.png new file mode 100644 index 00000000000..bc5c3288260 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/refresh-ide/new-environment.png differ diff --git a/website/static/img/docs/dbt-cloud/refresh-ide/refresh-ide.png b/website/static/img/docs/dbt-cloud/refresh-ide/refresh-ide.png new file mode 100644 index 00000000000..3f9660e9e6f Binary files /dev/null and b/website/static/img/docs/dbt-cloud/refresh-ide/refresh-ide.png differ diff --git a/website/static/img/docs/dbt-cloud/select-source-freshness.png 
b/website/static/img/docs/dbt-cloud/select-source-freshness.png index 3fa1550e7f2..8ca6a7eec8c 100644 Binary files a/website/static/img/docs/dbt-cloud/select-source-freshness.png and b/website/static/img/docs/dbt-cloud/select-source-freshness.png differ diff --git a/website/static/img/docs/dbt-cloud/semantic-layer/configure_sl.png b/website/static/img/docs/dbt-cloud/semantic-layer/configure_sl.png new file mode 100644 index 00000000000..bc9d8df6746 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/semantic-layer/configure_sl.png differ diff --git a/website/static/img/docs/dbt-cloud/semantic-layer/metrics_package.png b/website/static/img/docs/dbt-cloud/semantic-layer/metrics_package.png new file mode 100644 index 00000000000..8fe9ff88183 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/semantic-layer/metrics_package.png differ diff --git a/website/static/img/docs/dbt-cloud/semantic-layer/metrics_red_nodes.png b/website/static/img/docs/dbt-cloud/semantic-layer/metrics_red_nodes.png new file mode 100644 index 00000000000..19242c5b408 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/semantic-layer/metrics_red_nodes.png differ diff --git a/website/static/img/docs/dbt-cloud/semantic-layer/sl-architecture-flow.png b/website/static/img/docs/dbt-cloud/semantic-layer/sl-architecture-flow.png new file mode 100644 index 00000000000..bee9f6ae13a Binary files /dev/null and b/website/static/img/docs/dbt-cloud/semantic-layer/sl-architecture-flow.png differ diff --git a/website/static/img/docs/dbt-cloud/semantic-layer/sl_architecture.png b/website/static/img/docs/dbt-cloud/semantic-layer/sl_architecture.png new file mode 100644 index 00000000000..b5603790a0c Binary files /dev/null and b/website/static/img/docs/dbt-cloud/semantic-layer/sl_architecture.png differ diff --git a/website/static/img/docs/dbt-cloud/snowflake-conn-details.png b/website/static/img/docs/dbt-cloud/snowflake-conn-details.png index aeea62f79aa..3d55ebb3bab 100644 Binary files a/website/static/img/docs/dbt-cloud/snowflake-conn-details.png and b/website/static/img/docs/dbt-cloud/snowflake-conn-details.png differ diff --git a/website/static/img/docs/dbt-cloud/snowflake-keypair-auth.png b/website/static/img/docs/dbt-cloud/snowflake-keypair-auth.png index 999b274e251..7f0e29de590 100644 Binary files a/website/static/img/docs/dbt-cloud/snowflake-keypair-auth.png and b/website/static/img/docs/dbt-cloud/snowflake-keypair-auth.png differ diff --git a/website/static/img/docs/dbt-cloud/snowflake-userpass-auth.png b/website/static/img/docs/dbt-cloud/snowflake-userpass-auth.png index ae02835c86f..2dc87cabe32 100644 Binary files a/website/static/img/docs/dbt-cloud/snowflake-userpass-auth.png and b/website/static/img/docs/dbt-cloud/snowflake-userpass-auth.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/3c3c1ca-Screen_Shot_2019-03-21_at_11.05.28_AM.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/3c3c1ca-Screen_Shot_2019-03-21_at_11.05.28_AM.png deleted file mode 100644 index a60b91b8d66..00000000000 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/3c3c1ca-Screen_Shot_2019-03-21_at_11.05.28_AM.png and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/568adab-Screen_Shot_2019-02-08_at_9.13.09_PM.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/568adab-Screen_Shot_2019-02-08_at_9.13.09_PM.png deleted file mode 100644 index 780d4c48a0c..00000000000 Binary files 
a/website/static/img/docs/dbt-cloud/using-dbt-cloud/568adab-Screen_Shot_2019-02-08_at_9.13.09_PM.png and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/60f3fa2-Screen_Shot_2019-02-08_at_10.33.20_PM.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/60f3fa2-Screen_Shot_2019-02-08_at_10.33.20_PM.png deleted file mode 100644 index de4c52f8c89..00000000000 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/60f3fa2-Screen_Shot_2019-02-08_at_10.33.20_PM.png and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/61536c9-Screen_Shot_2019-02-08_at_9.46.29_PM.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/61536c9-Screen_Shot_2019-02-08_at_9.46.29_PM.png index c6fb2282839..1fd34b2f29d 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/61536c9-Screen_Shot_2019-02-08_at_9.46.29_PM.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/61536c9-Screen_Shot_2019-02-08_at_9.46.29_PM.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/98c05c5-Screen_Shot_2019-02-08_at_9.18.22_PM.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/98c05c5-Screen_Shot_2019-02-08_at_9.18.22_PM.png index 11e4e26b29b..fc11996726c 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/98c05c5-Screen_Shot_2019-02-08_at_9.18.22_PM.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/98c05c5-Screen_Shot_2019-02-08_at_9.18.22_PM.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/ADO CI Check.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/ADO CI Check.png new file mode 100644 index 00000000000..096978c2804 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/ADO CI Check.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/Disconnect-Repository.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/Disconnect-Repository.png index 7e657e2a0a8..58b07f42047 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/Disconnect-Repository.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/Disconnect-Repository.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/repo-config.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/repo-config.png index 5f944102f5a..b49c1887039 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/repo-config.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Enabling-CI/repo-config.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/DBT_ENV_SECRET.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/DBT_ENV_SECRET.png index b768f27803f..2a8b17dac70 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/DBT_ENV_SECRET.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/DBT_ENV_SECRET.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/job-override.gif b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/job-override.gif index e8c71a1360d..f513c5170b1 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/job-override.gif and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/job-override.gif differ diff --git 
a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/job-override.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/job-override.png index 3eac8bb095a..6467b0d0fa9 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/job-override.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/job-override.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/navigate-to-env-vars.gif b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/navigate-to-env-vars.gif index 5e964d45d33..5ae365c4244 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/navigate-to-env-vars.gif and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/navigate-to-env-vars.gif differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/personal-override.gif b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/personal-override.gif index 3244966d967..9eabb2458de 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/personal-override.gif and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/personal-override.gif differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/personal-override.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/personal-override.png index 0df7461065d..505461fea48 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/personal-override.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/personal-override.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/project-environment-view.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/project-environment-view.png index b3bdf505589..a973448c032 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/project-environment-view.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/project-environment-view.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/warehouse-override.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/warehouse-override.png index d067fc37f7c..25c7ce3edd5 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/warehouse-override.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/Environment Variables/warehouse-override.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/ci-deferral.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/ci-deferral.png index 0eca99ad59b..5186b0a9a76 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/ci-deferral.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/ci-deferral.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/d43d5e6-job-schedule.gif b/website/static/img/docs/dbt-cloud/using-dbt-cloud/d43d5e6-job-schedule.gif deleted file mode 100644 index 8d6cd95defc..00000000000 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/d43d5e6-job-schedule.gif and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/dashboard-status-tiles/tableau-object.png 
b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dashboard-status-tiles/tableau-object.png new file mode 100644 index 00000000000..3125cda7335 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dashboard-status-tiles/tableau-object.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/data-sources-next.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/data-sources-next.png new file mode 100644 index 00000000000..f4db3f40638 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/data-sources-next.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/data-sources.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/data-sources.png index ce34963b056..c8ee5c8f684 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/data-sources.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/data-sources.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/BQ-access.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/BQ-access.png new file mode 100644 index 00000000000..3ac8118710d Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/BQ-access.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/BQ-nav.gif b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/BQ-nav.gif new file mode 100644 index 00000000000..27e3caf39cc Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/BQ-nav.gif differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/bq-oauth-app.gif b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/bq-oauth-app.gif new file mode 100644 index 00000000000..071263396f1 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/bq-oauth-app.gif differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/dbt-cloud-bq-id-secret.gif b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/dbt-cloud-bq-id-secret.gif new file mode 100644 index 00000000000..0545fb853a3 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/BQ-auth/dbt-cloud-bq-id-secret.gif differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/developer-bq-auth.gif b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/developer-bq-auth.gif new file mode 100644 index 00000000000..b3d1f82751c Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/dbt-cloud-enterprise/developer-bq-auth.gif differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/development-credentials.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/development-credentials.png index d6502609e1b..7eea1bddf32 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/development-credentials.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/development-credentials.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/doc-menu.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/doc-menu.png index 66b4de030ac..9fb71acafff 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/doc-menu.png and 
b/website/static/img/docs/dbt-cloud/using-dbt-cloud/doc-menu.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/documentation-job-execution-settings.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/documentation-job-execution-settings.png new file mode 100644 index 00000000000..b8db4ab22d2 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/documentation-job-execution-settings.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/documentation-project-details.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/documentation-project-details.png new file mode 100644 index 00000000000..6b1aad38f2e Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/documentation-project-details.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/edit-job-generate-artifacts.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/edit-job-generate-artifacts.png new file mode 100644 index 00000000000..365e2a4a9e1 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/edit-job-generate-artifacts.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/email-notifications.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/email-notifications.png index 1dbd7ccc397..7e77098d63e 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/email-notifications.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/email-notifications.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/job-schedule.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/job-schedule.png new file mode 100644 index 00000000000..58bb199aeeb Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/job-schedule.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/job-step-source-freshness.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/job-step-source-freshness.png index 8a10b76236a..cc027ef58f9 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/job-step-source-freshness.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/job-step-source-freshness.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/jobs-settings-target-name.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/jobs-settings-target-name.png new file mode 100644 index 00000000000..eaeb4b65c3f Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/jobs-settings-target-name.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/project-level-artifact-updated.gif b/website/static/img/docs/dbt-cloud/using-dbt-cloud/project-level-artifact-updated.gif deleted file mode 100644 index 8fb3fd8015e..00000000000 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/project-level-artifact-updated.gif and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/project-level-artifact-updated.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/project-level-artifact-updated.png new file mode 100644 index 00000000000..6ec0c9b32f0 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/project-level-artifact-updated.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/project-level-artifacts.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/project-level-artifacts.png deleted file mode 100644 index be4661c85c2..00000000000 Binary files 
a/website/static/img/docs/dbt-cloud/using-dbt-cloud/project-level-artifacts.png and /dev/null differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/using_ci_dbt_cloud.png b/website/static/img/docs/dbt-cloud/using-dbt-cloud/using_ci_dbt_cloud.png index bb6410e4ffc..63fa5a1450d 100644 Binary files a/website/static/img/docs/dbt-cloud/using-dbt-cloud/using_ci_dbt_cloud.png and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/using_ci_dbt_cloud.png differ diff --git a/website/static/img/docs/dbt-cloud/using-dbt-cloud/viewing-docs.gif b/website/static/img/docs/dbt-cloud/using-dbt-cloud/viewing-docs.gif new file mode 100644 index 00000000000..0f15c867ac4 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/using-dbt-cloud/viewing-docs.gif differ diff --git a/website/static/img/docs/running-a-dbt-project/0d9f366-Screen_Shot_2019-11-19_at_12.13.28_PM.png b/website/static/img/docs/running-a-dbt-project/0d9f366-Screen_Shot_2019-11-19_at_12.13.28_PM.png deleted file mode 100644 index a803622d798..00000000000 Binary files a/website/static/img/docs/running-a-dbt-project/0d9f366-Screen_Shot_2019-11-19_at_12.13.28_PM.png and /dev/null differ diff --git a/website/static/img/docs/running-a-dbt-project/65fb95f-Screen_Shot_2019-11-19_at_11.30.00_AM.png b/website/static/img/docs/running-a-dbt-project/65fb95f-Screen_Shot_2019-11-19_at_11.30.00_AM.png deleted file mode 100644 index 41a33ab495c..00000000000 Binary files a/website/static/img/docs/running-a-dbt-project/65fb95f-Screen_Shot_2019-11-19_at_11.30.00_AM.png and /dev/null differ diff --git a/website/static/img/docs/running-a-dbt-project/8e7a6eb-cloud-img.png b/website/static/img/docs/running-a-dbt-project/8e7a6eb-cloud-img.png deleted file mode 100644 index 6090fd27006..00000000000 Binary files a/website/static/img/docs/running-a-dbt-project/8e7a6eb-cloud-img.png and /dev/null differ diff --git a/website/static/img/docs/running-a-dbt-project/a810a20-Screen_Shot_2019-11-19_at_11.26.55_AM.png b/website/static/img/docs/running-a-dbt-project/a810a20-Screen_Shot_2019-11-19_at_11.26.55_AM.png deleted file mode 100644 index 0531c780abf..00000000000 Binary files a/website/static/img/docs/running-a-dbt-project/a810a20-Screen_Shot_2019-11-19_at_11.26.55_AM.png and /dev/null differ diff --git a/website/static/img/docs/running-a-dbt-project/dbt_cloud_airflow_trigger.png b/website/static/img/docs/running-a-dbt-project/dbt_cloud_airflow_trigger.png index 2a68d742e15..892370656c0 100644 Binary files a/website/static/img/docs/running-a-dbt-project/dbt_cloud_airflow_trigger.png and b/website/static/img/docs/running-a-dbt-project/dbt_cloud_airflow_trigger.png differ diff --git a/website/static/img/docs/running-a-dbt-project/ec04c10-Screen_Shot_2019-11-19_at_12.13.46_PM.png b/website/static/img/docs/running-a-dbt-project/ec04c10-Screen_Shot_2019-11-19_at_12.13.46_PM.png deleted file mode 100644 index 2ecb875afb4..00000000000 Binary files a/website/static/img/docs/running-a-dbt-project/ec04c10-Screen_Shot_2019-11-19_at_12.13.46_PM.png and /dev/null differ diff --git a/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/create-dev-env.png b/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/create-dev-env.png new file mode 100644 index 00000000000..6e27d9922b8 Binary files /dev/null and b/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/create-dev-env.png differ diff --git a/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/dev-cred-edit-proj.png 
b/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/dev-cred-edit-proj.png new file mode 100644 index 00000000000..50ad975613b Binary files /dev/null and b/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/dev-cred-edit-proj.png differ diff --git a/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/dev-cred-project-setup.png b/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/dev-cred-project-setup.png new file mode 100644 index 00000000000..f907ff27edc Binary files /dev/null and b/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/dev-cred-project-setup.png differ diff --git a/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/empty-env-page.png b/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/empty-env-page.png new file mode 100644 index 00000000000..14444259f6e Binary files /dev/null and b/website/static/img/docs/running-a-dbt-project/using-the-dbt-ide/empty-env-page.png differ diff --git a/website/static/img/docs/terms/dag/bad_dag.png b/website/static/img/docs/terms/dag/bad_dag.png new file mode 100644 index 00000000000..99fe8f8c56f Binary files /dev/null and b/website/static/img/docs/terms/dag/bad_dag.png differ diff --git a/website/static/img/docs/terms/dag/lineage_graph.png b/website/static/img/docs/terms/dag/lineage_graph.png new file mode 100644 index 00000000000..a2b88adff87 Binary files /dev/null and b/website/static/img/docs/terms/dag/lineage_graph.png differ diff --git a/website/static/img/docs/terms/dag/mini_dag.png b/website/static/img/docs/terms/dag/mini_dag.png new file mode 100644 index 00000000000..19693581ab2 Binary files /dev/null and b/website/static/img/docs/terms/dag/mini_dag.png differ diff --git a/website/static/img/docs/terms/data-lineage/bad_dag.png b/website/static/img/docs/terms/data-lineage/bad_dag.png new file mode 100644 index 00000000000..99fe8f8c56f Binary files /dev/null and b/website/static/img/docs/terms/data-lineage/bad_dag.png differ diff --git a/website/static/img/docs/terms/data-lineage/dag_example.png b/website/static/img/docs/terms/data-lineage/dag_example.png new file mode 100644 index 00000000000..eb78ab0a0ca Binary files /dev/null and b/website/static/img/docs/terms/data-lineage/dag_example.png differ diff --git a/website/static/img/docs/terms/dimensional-modeling/fact-star.png b/website/static/img/docs/terms/dimensional-modeling/fact-star.png new file mode 100644 index 00000000000..22ebed964c1 Binary files /dev/null and b/website/static/img/docs/terms/dimensional-modeling/fact-star.png differ diff --git a/website/static/img/docs/terms/edw/meme.png b/website/static/img/docs/terms/edw/meme.png new file mode 100644 index 00000000000..7aa96ef8780 Binary files /dev/null and b/website/static/img/docs/terms/edw/meme.png differ diff --git a/website/static/img/docs/terms/relational-database/relation.png b/website/static/img/docs/terms/relational-database/relation.png new file mode 100644 index 00000000000..68b07a9567b Binary files /dev/null and b/website/static/img/docs/terms/relational-database/relation.png differ diff --git a/website/static/img/docs/terms/reverse-etl/reverse-etl-diagram.png b/website/static/img/docs/terms/reverse-etl/reverse-etl-diagram.png new file mode 100644 index 00000000000..73acadd6953 Binary files /dev/null and b/website/static/img/docs/terms/reverse-etl/reverse-etl-diagram.png differ diff --git a/website/static/img/favicon.ico b/website/static/img/favicon.ico deleted file mode 100644 index ac704a27297..00000000000 Binary files 
a/website/static/img/favicon.ico and /dev/null differ diff --git a/website/static/img/favicon.png b/website/static/img/favicon.png new file mode 100644 index 00000000000..50b097a0468 Binary files /dev/null and b/website/static/img/favicon.png differ diff --git a/website/static/img/favicon.svg b/website/static/img/favicon.svg new file mode 100644 index 00000000000..a0577b18630 --- /dev/null +++ b/website/static/img/favicon.svg @@ -0,0 +1,12 @@ + + + + + \ No newline at end of file diff --git a/website/static/img/guides/best-practices/how-we-structure/narrowing-dag.png b/website/static/img/guides/best-practices/how-we-structure/narrowing-dag.png new file mode 100644 index 00000000000..66fe71acc8f Binary files /dev/null and b/website/static/img/guides/best-practices/how-we-structure/narrowing-dag.png differ diff --git a/website/static/img/guides/best-practices/how-we-structure/widening-dag.png b/website/static/img/guides/best-practices/how-we-structure/widening-dag.png new file mode 100644 index 00000000000..13e987aa25e Binary files /dev/null and b/website/static/img/guides/best-practices/how-we-structure/widening-dag.png differ diff --git a/website/static/img/guides/orchestration/airflow-and-dbt-cloud/airflow-connections-menu.png b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/airflow-connections-menu.png new file mode 100644 index 00000000000..7a55d0bbad0 Binary files /dev/null and b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/airflow-connections-menu.png differ diff --git a/website/static/img/guides/orchestration/airflow-and-dbt-cloud/airflow-dag.png b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/airflow-dag.png new file mode 100644 index 00000000000..3c491d484a5 Binary files /dev/null and b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/airflow-dag.png differ diff --git a/website/static/img/guides/orchestration/airflow-and-dbt-cloud/airflow-login.png b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/airflow-login.png new file mode 100644 index 00000000000..a3fa42efe15 Binary files /dev/null and b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/airflow-login.png differ diff --git a/website/static/img/guides/orchestration/airflow-and-dbt-cloud/approve-clearing.png b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/approve-clearing.png new file mode 100644 index 00000000000..3cc7442189f Binary files /dev/null and b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/approve-clearing.png differ diff --git a/website/static/img/guides/orchestration/airflow-and-dbt-cloud/clear-task-instance.png b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/clear-task-instance.png new file mode 100644 index 00000000000..92ac3f1cd97 Binary files /dev/null and b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/clear-task-instance.png differ diff --git a/website/static/img/guides/orchestration/airflow-and-dbt-cloud/connection-type.png b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/connection-type.png new file mode 100644 index 00000000000..d64163d6173 Binary files /dev/null and b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/connection-type.png differ diff --git a/website/static/img/guides/orchestration/airflow-and-dbt-cloud/create-connection.png b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/create-connection.png new file mode 100644 index 00000000000..b6b2ecc412b Binary files /dev/null and 
b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/create-connection.png differ diff --git a/website/static/img/guides/orchestration/airflow-and-dbt-cloud/re-run.png b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/re-run.png new file mode 100644 index 00000000000..a57a6be01c5 Binary files /dev/null and b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/re-run.png differ diff --git a/website/static/img/guides/orchestration/airflow-and-dbt-cloud/task-dag-view.png b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/task-dag-view.png new file mode 100644 index 00000000000..3c491d484a5 Binary files /dev/null and b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/task-dag-view.png differ diff --git a/website/static/img/guides/orchestration/airflow-and-dbt-cloud/task-run-instance.png b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/task-run-instance.png new file mode 100644 index 00000000000..757515554e2 Binary files /dev/null and b/website/static/img/guides/orchestration/airflow-and-dbt-cloud/task-run-instance.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-api-key-gitlab.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-api-key-gitlab.png new file mode 100644 index 00000000000..b339f5aa9bd Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-api-key-gitlab.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-github-triggered.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-github-triggered.png new file mode 100644 index 00000000000..8bff1464c30 Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-github-triggered.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-gitlab-triggered.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-gitlab-triggered.png new file mode 100644 index 00000000000..5c0ad4b43f7 Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-gitlab-triggered.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-url.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-url.png new file mode 100644 index 00000000000..3d06935abc1 Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-cloud-job-url.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-run-on-merge-github.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-run-on-merge-github.png new file mode 100644 index 00000000000..d9106d5627c Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-run-on-merge-github.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-run-on-merge-gitlab.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-run-on-merge-gitlab.png new file mode 100644 index 00000000000..1f7a102ace5 Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-run-on-merge-gitlab.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-service-token-page.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-service-token-page.png 
new file mode 100644 index 00000000000..d213c03b83a Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/dbt-service-token-page.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/gitlab-cicd-payment-warning.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/gitlab-cicd-payment-warning.png new file mode 100644 index 00000000000..7df73f200f2 Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/gitlab-cicd-payment-warning.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-github.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-github.png new file mode 100644 index 00000000000..e21be5a8a7c Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-github.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-gitlab.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-gitlab.png new file mode 100644 index 00000000000..e232dddbe04 Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-gitlab.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-logs-github.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-logs-github.png new file mode 100644 index 00000000000..ea27ed6e527 Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-logs-github.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-logs-gitlab.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-logs-gitlab.png new file mode 100644 index 00000000000..f4fe49c97b1 Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/lint-on-push-logs-gitlab.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/pipeline-diagram.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/pipeline-diagram.png new file mode 100644 index 00000000000..224a122246f Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/pipeline-diagram.png differ diff --git a/website/static/img/guides/orchestration/custom-cicd-pipelines/pipeline-programs-diagram.png b/website/static/img/guides/orchestration/custom-cicd-pipelines/pipeline-programs-diagram.png new file mode 100644 index 00000000000..40e2912f087 Binary files /dev/null and b/website/static/img/guides/orchestration/custom-cicd-pipelines/pipeline-programs-diagram.png differ diff --git a/website/static/img/hero-graphic-2x.png b/website/static/img/hero-graphic-2x.png new file mode 100644 index 00000000000..8501a22e2c6 Binary files /dev/null and b/website/static/img/hero-graphic-2x.png differ diff --git a/website/static/img/hero-graphic.png b/website/static/img/hero-graphic.png new file mode 100644 index 00000000000..9c6a6d81485 Binary files /dev/null and b/website/static/img/hero-graphic.png differ diff --git a/website/static/img/icons/book.svg b/website/static/img/icons/book.svg new file mode 100644 index 00000000000..ff1faa686b5 --- /dev/null +++ b/website/static/img/icons/book.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/calendar.svg b/website/static/img/icons/calendar.svg new file mode 100644 index 00000000000..de2a9b11880 
--- /dev/null +++ b/website/static/img/icons/calendar.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/computer.svg b/website/static/img/icons/computer.svg new file mode 100644 index 00000000000..c9a61e442bf --- /dev/null +++ b/website/static/img/icons/computer.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/discussions.svg b/website/static/img/icons/discussions.svg new file mode 100644 index 00000000000..e29a8dd8930 --- /dev/null +++ b/website/static/img/icons/discussions.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/docs.svg b/website/static/img/icons/docs.svg new file mode 100644 index 00000000000..bb788a1b4d8 --- /dev/null +++ b/website/static/img/icons/docs.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/folder.svg b/website/static/img/icons/folder.svg new file mode 100644 index 00000000000..e65bf093c95 --- /dev/null +++ b/website/static/img/icons/folder.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/guides.svg b/website/static/img/icons/guides.svg new file mode 100644 index 00000000000..c67e31a2018 --- /dev/null +++ b/website/static/img/icons/guides.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/packages.svg b/website/static/img/icons/packages.svg new file mode 100644 index 00000000000..cd2623a05b3 --- /dev/null +++ b/website/static/img/icons/packages.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/pencil-paper.svg b/website/static/img/icons/pencil-paper.svg new file mode 100644 index 00000000000..b614cd9f186 --- /dev/null +++ b/website/static/img/icons/pencil-paper.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/question-mark.svg b/website/static/img/icons/question-mark.svg new file mode 100644 index 00000000000..1de805a8e8f --- /dev/null +++ b/website/static/img/icons/question-mark.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/rocket.svg b/website/static/img/icons/rocket.svg new file mode 100644 index 00000000000..c524ebead6a --- /dev/null +++ b/website/static/img/icons/rocket.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/slack.svg b/website/static/img/icons/slack.svg new file mode 100644 index 00000000000..b8f5a0576f9 --- /dev/null +++ b/website/static/img/icons/slack.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/smiley-face.svg b/website/static/img/icons/smiley-face.svg new file mode 100644 index 00000000000..dbd23e8ed34 --- /dev/null +++ b/website/static/img/icons/smiley-face.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/star.svg b/website/static/img/icons/star.svg new file mode 100644 index 00000000000..b504aa7e5dc --- /dev/null +++ b/website/static/img/icons/star.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/book.svg b/website/static/img/icons/white/book.svg new file mode 100644 index 00000000000..ba5dd172244 --- /dev/null +++ b/website/static/img/icons/white/book.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/calendar.svg b/website/static/img/icons/white/calendar.svg new file mode 100644 index 00000000000..60a70d916d9 --- /dev/null +++ b/website/static/img/icons/white/calendar.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/computer.svg b/website/static/img/icons/white/computer.svg new file mode 100644 index 00000000000..cfea44ec81a --- /dev/null +++ b/website/static/img/icons/white/computer.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/discussions.svg 
b/website/static/img/icons/white/discussions.svg new file mode 100644 index 00000000000..099865eeeb9 --- /dev/null +++ b/website/static/img/icons/white/discussions.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/docs.svg b/website/static/img/icons/white/docs.svg new file mode 100644 index 00000000000..fc3b6b56345 --- /dev/null +++ b/website/static/img/icons/white/docs.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/folder.svg b/website/static/img/icons/white/folder.svg new file mode 100644 index 00000000000..44cea3048a4 --- /dev/null +++ b/website/static/img/icons/white/folder.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/guides.svg b/website/static/img/icons/white/guides.svg new file mode 100644 index 00000000000..c8796c30303 --- /dev/null +++ b/website/static/img/icons/white/guides.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/packages.svg b/website/static/img/icons/white/packages.svg new file mode 100644 index 00000000000..ff6d2f4fc50 --- /dev/null +++ b/website/static/img/icons/white/packages.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/pencil-paper.svg b/website/static/img/icons/white/pencil-paper.svg new file mode 100644 index 00000000000..252e9257c5d --- /dev/null +++ b/website/static/img/icons/white/pencil-paper.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/question-mark.svg b/website/static/img/icons/white/question-mark.svg new file mode 100644 index 00000000000..75e18753eda --- /dev/null +++ b/website/static/img/icons/white/question-mark.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/rocket.svg b/website/static/img/icons/white/rocket.svg new file mode 100644 index 00000000000..d582c3455db --- /dev/null +++ b/website/static/img/icons/white/rocket.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/slack.svg b/website/static/img/icons/white/slack.svg new file mode 100644 index 00000000000..2582536472a --- /dev/null +++ b/website/static/img/icons/white/slack.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/smiley-face.svg b/website/static/img/icons/white/smiley-face.svg new file mode 100644 index 00000000000..8a901ad589a --- /dev/null +++ b/website/static/img/icons/white/smiley-face.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/icons/white/star.svg b/website/static/img/icons/white/star.svg new file mode 100644 index 00000000000..f7ced226537 --- /dev/null +++ b/website/static/img/icons/white/star.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/moon.svg b/website/static/img/moon.svg new file mode 100644 index 00000000000..09b2a867b79 --- /dev/null +++ b/website/static/img/moon.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/static/img/node_color_example.png b/website/static/img/node_color_example.png new file mode 100644 index 00000000000..83b26f5735a Binary files /dev/null and b/website/static/img/node_color_example.png differ diff --git a/website/static/img/redshift_tutorial/images/dbt_cloud_redshift_account_settings.png b/website/static/img/redshift_tutorial/images/dbt_cloud_redshift_account_settings.png new file mode 100644 index 00000000000..38ec200ed91 Binary files /dev/null and b/website/static/img/redshift_tutorial/images/dbt_cloud_redshift_account_settings.png differ diff --git a/website/static/img/redshift_tutorial/images/dbt_cloud_redshift_development_credentials.png 
b/website/static/img/redshift_tutorial/images/dbt_cloud_redshift_development_credentials.png new file mode 100644 index 00000000000..523ba9f0c6c Binary files /dev/null and b/website/static/img/redshift_tutorial/images/dbt_cloud_redshift_development_credentials.png differ diff --git a/website/static/img/redshift_tutorial/images/dbt_cloud_setup_redshift_connection_start.png b/website/static/img/redshift_tutorial/images/dbt_cloud_setup_redshift_connection_start.png new file mode 100644 index 00000000000..8d2e5ffcb70 Binary files /dev/null and b/website/static/img/redshift_tutorial/images/dbt_cloud_setup_redshift_connection_start.png differ diff --git a/website/static/img/redshift_tutorial/images/setup_redshift_connect.png b/website/static/img/redshift_tutorial/images/setup_redshift_connect.png deleted file mode 100644 index de6e789494c..00000000000 Binary files a/website/static/img/redshift_tutorial/images/setup_redshift_connect.png and /dev/null differ diff --git a/website/static/img/snowflake_tutorial/dbt_cloud_setup_snowflake_connection_start.png b/website/static/img/snowflake_tutorial/dbt_cloud_setup_snowflake_connection_start.png new file mode 100644 index 00000000000..34e83360c7e Binary files /dev/null and b/website/static/img/snowflake_tutorial/dbt_cloud_setup_snowflake_connection_start.png differ diff --git a/website/static/img/snowflake_tutorial/dbt_cloud_snowflake_account_settings.png b/website/static/img/snowflake_tutorial/dbt_cloud_snowflake_account_settings.png index 1f3c24e6e7e..9084558d601 100644 Binary files a/website/static/img/snowflake_tutorial/dbt_cloud_snowflake_account_settings.png and b/website/static/img/snowflake_tutorial/dbt_cloud_snowflake_account_settings.png differ diff --git a/website/static/img/snowflake_tutorial/dbt_cloud_snowflake_development_credentials.png b/website/static/img/snowflake_tutorial/dbt_cloud_snowflake_development_credentials.png index e4fdf76d1c9..1e6187e2b2a 100644 Binary files a/website/static/img/snowflake_tutorial/dbt_cloud_snowflake_development_credentials.png and b/website/static/img/snowflake_tutorial/dbt_cloud_snowflake_development_credentials.png differ diff --git a/website/static/img/structure-dbt-projects.png b/website/static/img/structure-dbt-projects.png new file mode 100644 index 00000000000..a4d85c47460 Binary files /dev/null and b/website/static/img/structure-dbt-projects.png differ diff --git a/website/static/js/onetrust.js b/website/static/js/onetrust.js new file mode 100644 index 00000000000..f7fbcbd51c6 --- /dev/null +++ b/website/static/js/onetrust.js @@ -0,0 +1,6 @@
+function openPreferenceCenter() {
+  if(window?.OneTrust?.ToggleInfoDisplay) {
+    console.log('opening center')
+    window.OneTrust.ToggleInfoDisplay()
+  }
+}
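The new onetrust.js helper above guards the call so nothing breaks on pages where the OneTrust consent SDK has not loaded yet (OneTrust.ToggleInfoDisplay() is the SDK's own method for opening the cookie preference center). A minimal usage sketch, assuming a hypothetical footer link — this markup is illustrative and is not part of this commit:

<!-- Hypothetical usage: load the helper added in this diff, then wire it to a link. -->
<script src="/js/onetrust.js"></script>
<a href="#" onclick="openPreferenceCenter(); return false;">Cookie Preferences</a>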
 unique_id
+unique_id
 calendar_date
+calendar_date
 ad_id
+ad_id
 impressions
+impressions
 spend
+spend
 clicks
+clicks
 conversions
+conversions