diff --git a/.github/workflows/chores.yml b/.github/workflows/chores.yml
index f069df1417..9fcb788995 100644
--- a/.github/workflows/chores.yml
+++ b/.github/workflows/chores.yml
@@ -7,36 +7,34 @@ on:
 jobs:
     check_api_versions:
-        runs-on: sfdc-ubuntu-latest
+        runs-on: SFDO-Tooling-Ubuntu
         outputs:
             hub_version: ${{ steps.devhub-api-version.outputs.hub_version }}
             cci_version: ${{ steps.cci-api-version.outputs.cci_version }}
         steps:
-            - uses: actions/checkout@v3
+            - uses: actions/checkout@v4
              with:
                   ref: main
-            - name: Set up Python
-              uses: actions/setup-python@v4
             - name: Get Dev Hub API Version
               id: devhub-api-version
               env:
                   HUB_URL: ${{ format('{0}/services/data', secrets.SFDO_HUB_URL) }}
               run: |
                   version=$(curl -s $HUB_URL | jq -r '.[-1] | .version')
-                  echo "::set-output name=hub_version::$version"
+                  echo "hub_version=$version" >> $GITHUB_OUTPUT
             - name: Get CURRENT_SF_API_VERSION
               id: cci-api-version
               run: |
                   version=$(yq '.project.package.api_version' cumulusci/cumulusci.yml)
-                  echo "::set-output name=cci_version::$version"
+                  echo "cci_version=$version" >> $GITHUB_OUTPUT
     update_api_versions:
-        runs-on: sfdc-ubuntu-latest
+        runs-on: SFDO-Tooling-Ubuntu
         needs: check_api_versions
         if: ${{ needs.check_api_versions.outputs.hub_version != needs.check_api_versions.outputs.cci_version }}
         env:
             VERSION: ${{ needs.check_api_versions.outputs.hub_version }}
         steps:
-            - uses: actions/checkout@v3
+            - uses: actions/checkout@v4
               with:
                   fetch-depth: 0
                   ref: main
@@ -58,3 +56,15 @@ jobs:
                   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
               run: |
                   gh pr create --fill --label 'auto-pr'
+    test_sfdx_release_candidate:
+        uses: ./.github/workflows/release_test_sfdx.yml
+        with:
+            sfdx-release-channel: stable-rc
+        secrets:
+            CUMULUSCI_ORG_packaging: ${{ secrets.CUMULUSCI_ORG_packaging }}
+            CUMULUSCI_SERVICE_github: ${{ secrets.CUMULUSCI_SERVICE_github }}
+            CCITEST_APP_KEY: ${{ secrets.CCITEST_APP_KEY }}
+            SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }}
+            SFDX_HUB_KEY: ${{ secrets.SFDX_HUB_KEY }}
+            SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }}
+            SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }}
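For context on what the two jobs above compute: the Salesforce `/services/data` endpoint returns a JSON array of available API versions, and the workflow compares the newest one against `project.package.api_version` in `cumulusci/cumulusci.yml`. A rough Python equivalent of that check (a hypothetical script, not part of this PR; the hub URL is a placeholder):

# Hypothetical equivalent of the check_api_versions job above (not part of this PR).
import requests
import yaml

def latest_hub_api_version(hub_url: str) -> str:
    # /services/data returns a JSON array ordered oldest-to-newest,
    # e.g. [..., {"version": "62.0", ...}]; the workflow takes the last entry.
    versions = requests.get(f"{hub_url}/services/data").json()
    return versions[-1]["version"]

def cci_api_version(path: str = "cumulusci/cumulusci.yml") -> str:
    with open(path) as f:
        return str(yaml.safe_load(f)["project"]["package"]["api_version"])

if __name__ == "__main__":
    hub = latest_hub_api_version("https://example.my.salesforce.com")
    cci = cci_api_version()
    # update_api_versions only runs when these differ.
    print(f"hub={hub} cci={cci} update_needed={hub != cci}")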
diff --git a/.github/workflows/feature_test.yml b/.github/workflows/feature_test.yml
index 3758e4ea6b..9433041f85 100644
--- a/.github/workflows/feature_test.yml
+++ b/.github/workflows/feature_test.yml
@@ -11,28 +11,33 @@ on:
 jobs:
     lint:
         name: Lint
-        if: ${{ github.event_name == 'pull_request' }}
+        if: ${{ contains(fromJSON('["workflow_dispatch", "pull_request"]'), github.event_name) }}
         uses: SFDO-Tooling/.github/.github/workflows/pre-commit.yml@main
     docs:
         name: Build Docs
         if: ${{ github.event_name == 'pull_request' }}
-        runs-on: sfdc-ubuntu-latest
+        runs-on: SFDO-Tooling-Ubuntu
         steps:
             - name: "Checkout"
-              uses: actions/checkout@v2
+              uses: actions/checkout@v4
               with:
                   fetch-depth: 1
-            - name: Set up Python 3.8
+            - name: Set up Python 3.11
              id: py
               uses: actions/setup-python@v4
               with:
-                  python-version: 3.8
-                  cache: pip
-                  cache-dependency-path: "requirements/*.txt"
+                  python-version: 3.11
+            - name: Set up uv
+              uses: SFDO-Tooling/setup-uv@main
+              with:
+                  version: "0.5.0"
+                  enable-cache: true
             - name: Install dependencies
-              run: pip install -r requirements_dev.txt
+              run: uv sync --group docs
             - name: Build Docs
-              run: make docs
+              run: |
+                  cd docs
+                  uv run sphinx-build -b html . ./_build
 
     unit_tests:
         name: "Unit tests: ${{ matrix.os }}-${{ matrix.python-version }}"
@@ -40,34 +45,40 @@ jobs:
         strategy:
             fail-fast: false
             matrix:
-                os: [macos-latest, sfdc-ubuntu-latest, sfdc-windows-latest]
-                python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+                os: [macos-latest, SFDO-Tooling-Ubuntu, SFDO-Tooling-Windows]
+                python-version: ["3.11", "3.12", "3.13"]
         steps:
-            - uses: actions/checkout@v2
+            - uses: actions/checkout@v4
             - name: Set up Python
               uses: actions/setup-python@v4
               with:
                   python-version: "${{ matrix.python-version }}"
-                  cache: pip
-                  cache-dependency-path: "requirements/*.txt"
+            - name: Set up uv
+              uses: SFDO-Tooling/setup-uv@main
+              with:
+                  version: "0.5.0"
+                  enable-cache: true
             - name: Install dependencies
-              run: pip install -r requirements_dev.txt
+              run: uv sync -p ${{ matrix.python-version }}
             - name: Run Pytest
-              run: pytest --cov-report= --cov=cumulusci
+              run: uv run pytest --cov-report= --cov=cumulusci
 
     robot_api:
         name: "Robot: No browser"
-        runs-on: sfdc-ubuntu-latest
+        runs-on: SFDO-Tooling-Ubuntu
         steps:
-            - uses: actions/checkout@v2
-            - name: Set up Python 3.8
+            - uses: actions/checkout@v4
+            - name: Set up Python 3.11
               uses: actions/setup-python@v4
               with:
-                  python-version: 3.8
-                  cache: pip
-                  cache-dependency-path: "requirements/*.txt"
-            - name: Install Python dependencies
-              run: pip install -r requirements_dev.txt
+                  python-version: 3.11
+            - name: Set up uv
+              uses: SFDO-Tooling/setup-uv@main
+              with:
+                  version: "0.5.0"
+                  enable-cache: true
+            - name: Install dependencies
+              run: uv sync -p 3.11
             - name: Install sfdx
               run: |
                   mkdir sfdx
@@ -75,16 +86,16 @@ jobs:
                   echo $(realpath sfdx/bin) >> $GITHUB_PATH
             - name: Authenticate Dev Hub
               run: |
-                  sfdx plugins --core
+                  sf plugins --core
                   echo $SFDX_HUB_KEY_BASE64 | base64 --decode > sfdx.key
-                  sfdx auth:jwt:grant --clientid $SFDX_CLIENT_ID --jwtkeyfile sfdx.key --username $SFDX_HUB_USERNAME --setdefaultdevhubusername -a hub
+                  sf org login jwt --client-id $SFDX_CLIENT_ID --jwt-key-file sfdx.key --username $SFDX_HUB_USERNAME --set-default-dev-hub --alias hub
               env:
                   SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }}
                   SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }}
                   SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }}
             - name: Run robot tests
               run: |
-                  coverage run --append $(which cci) task run robot \
+                  uv run cci task run robot \
                       --org dev \
                       -o name "CumulusCI" \
                       -o suites cumulusci/robotframework/tests \
@@ -92,10 +103,10 @@ jobs:
             - name: Delete scratch org
               if: always()
               run: |
-                  cci org scratch_delete dev
+                  uv run cci org scratch_delete dev
             - name: Store robot results
               if: failure()
-              uses: actions/upload-artifact@v1
+              uses: actions/upload-artifact@v4
               with:
                   name: robot
                   path: robot/CumulusCI/results
diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml
index 931de3a290..4f7c52d0aa 100644
--- a/.github/workflows/pre-release.yml
+++ b/.github/workflows/pre-release.yml
@@ -19,13 +19,13 @@ on:
 jobs:
     generate-changelog:
         name: Create a PR to update version and release notes
-        runs-on: sfdc-ubuntu-latest
+        runs-on: SFDO-Tooling-Ubuntu
         steps:
             - uses: actions/checkout@main
-            - name: Set up Python 3.8
+            - name: Set up Python 3.11
               uses: actions/setup-python@v4
               with:
-                  python-version: 3.8
+                  python-version: 3.11
                   cache: pip
             - name: Install build tool
               run: python -m pip install hatch
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 977eef7b6f..8050a53038 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -12,18 +12,16 @@ concurrency: publishing
 
 jobs:
     publish-to-pypi:
         name: Publish new release to PyPI
-        runs-on: sfdc-ubuntu-latest
+        runs-on: SFDO-Tooling-Ubuntu
         steps:
             - uses: actions/checkout@main
-            - name: Set up Python 3.8
+            - name: Set up Python 3.11
               uses: actions/setup-python@v4
               with:
-                  python-version: 3.8
+                  python-version: 3.11
                   cache: pip
             - name: Install build tools
               run: python -m pip install hatch tomli tomli-w
-            - name: Pin dependencies
-              run: python utility/pin_dependencies.py
             - name: Build source tarball and binary wheel
               run: hatch build -c
             - name: Upload to PyPI
diff --git a/.github/workflows/release_test.yml b/.github/workflows/release_test.yml
index 683daf7f02..b3d7c2a917 100644
--- a/.github/workflows/release_test.yml
+++ b/.github/workflows/release_test.yml
@@ -1,48 +1,22 @@
 name: Release Test
 
 on:
+    workflow_dispatch:
     pull_request:
         types: [opened, synchronize, reopened] # Default
-    workflow_call:
-        secrets:
-            CUMULUSCI_ORG_packaging:
-                required: true
-            CUMULUSCI_SERVICE_github:
-                required: true
-            CCITEST_APP_KEY:
-                required: true
-            SFDX_CLIENT_ID:
-                required: true
-            SFDX_HUB_KEY:
-                required: true
-            SFDX_HUB_KEY_BASE64:
-                required: true
-            SFDX_HUB_USERNAME:
-                required: true
-
-env:
-    CUMULUSCI_ORG_scratch: '{"config_file": "orgs/dev.json", "scratch": true}'
-    CUMULUSCI_ORG_packaging: ${{ secrets.CUMULUSCI_ORG_packaging }}
-    CUMULUSCI_SERVICE_github: ${{ secrets.CUMULUSCI_SERVICE_github }}
-    GITHUB_APP_ID: 129383
-    GITHUB_APP_KEY: ${{ secrets.CCITEST_APP_KEY }}
-    SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }}
-    SFDX_HUB_KEY: ${{ secrets.SFDX_HUB_KEY }}
-    SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }}
-    SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }}
 
 jobs:
     test_artifacts:
         name: "Test Package Artifacts"
-        runs-on: sfdc-ubuntu-latest
+        runs-on: SFDO-Tooling-Ubuntu
         steps:
             - uses: actions/checkout@v3
-            - name: Set up Python 3.8
+            - name: Set up Python 3.11
               uses: actions/setup-python@v4
               with:
-                  python-version: 3.8
+                  python-version: 3.11
                   cache: pip
-                  cache-dependency-path: "requirements/*.txt"
+                  cache-dependency-path: "pyproject.toml"
             - name: Install build tools
               run: pip install hatch
             - name: Test source tarball and binary wheel
@@ -67,45 +41,14 @@ jobs:
     test_release:
         name: "Test Release Flows"
-        runs-on: sfdc-ubuntu-latest
-        concurrency: release
-        steps:
-            - uses: actions/checkout@v3
-            - name: Set up Python 3.8
-              uses: actions/setup-python@v4
-              with:
-                  python-version: 3.8
-                  cache: pip
-                  cache-dependency-path: "requirements/*.txt"
-            - name: Install Python dependencies
-              run: pip install -r requirements_dev.txt
-            - name: Install sfdx
-              run: |
-                  mkdir sfdx
-                  wget -qO- https://developer.salesforce.com/media/salesforce-cli/sfdx/channels/stable/sfdx-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1
-                  echo $(realpath sfdx/bin) >> $GITHUB_PATH
-            - name: Authenticate Dev Hub
-              run: |
-                  sfdx plugins --core
-                  echo $SFDX_HUB_KEY_BASE64 | base64 --decode > sfdx.key
-                  sfdx auth:jwt:grant --clientid $SFDX_CLIENT_ID --jwtkeyfile sfdx.key --username $SFDX_HUB_USERNAME --setdefaultdevhubusername -a hub
-            - name: Check out CumulusCI-Test
-              run: |
-                  git clone https://github.com/SFDO-Tooling/CumulusCI-Test
-            - name: Run ci_feature flow
-              run: |
-                  cd CumulusCI-Test
-                  coverage run --append --rcfile=../pyproject.toml --source=../cumulusci $(which cci) flow run ci_feature --org scratch --delete-org
-            - name: Run ci_beta flow
-              run: |
-                  cd CumulusCI-Test
-                  coverage run --append --rcfile=../pyproject.toml --source=../cumulusci $(which cci) flow run ci_beta --org scratch --delete-org
-            - name: Run ci_master flow
-              run: |
-                  cd CumulusCI-Test
-                  coverage run --append --rcfile=../pyproject.toml --source=../cumulusci $(which cci) flow run ci_master --org scratch --delete-org
-            - name: Run release_beta flow
-              run: |
-                  export SFDX_HUB_KEY="$(echo $SFDX_HUB_KEY_BASE64 | base64 --decode)"
-                  cd CumulusCI-Test
-                  coverage run --append --rcfile=../pyproject.toml --source=../cumulusci $(which cci) flow run release_beta --org packaging
+        uses: ./.github/workflows/release_test_sfdx.yml
+        with:
+            sfdx-release-channel: stable
+        secrets:
+            CUMULUSCI_ORG_packaging: ${{ secrets.CUMULUSCI_ORG_packaging }}
+            CUMULUSCI_SERVICE_github: ${{ secrets.CUMULUSCI_SERVICE_github }}
+            CCITEST_APP_KEY: ${{ secrets.CCITEST_APP_KEY }}
+            SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }}
+            SFDX_HUB_KEY: ${{ secrets.SFDX_HUB_KEY }}
+            SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }}
+            SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }}
diff --git a/.github/workflows/release_test_sfdx.yml b/.github/workflows/release_test_sfdx.yml
new file mode 100644
index 0000000000..00eb55719e
--- /dev/null
+++ b/.github/workflows/release_test_sfdx.yml
@@ -0,0 +1,88 @@
+name: SFDX Integration Test
+
+on:
+    workflow_call:
+        inputs:
+            sfdx-release-channel:
+                required: false
+                type: string
+                default: stable
+        secrets:
+            CUMULUSCI_ORG_packaging:
+                required: true
+            CUMULUSCI_SERVICE_github:
+                required: true
+            CCITEST_APP_KEY:
+                required: true
+            SFDX_CLIENT_ID:
+                required: true
+            SFDX_HUB_KEY:
+                required: true
+            SFDX_HUB_KEY_BASE64:
+                required: true
+            SFDX_HUB_USERNAME:
+                required: true
+
+env:
+    CUMULUSCI_ORG_scratch: '{"config_file": "orgs/dev.json", "scratch": true}'
+    CUMULUSCI_ORG_packaging: ${{ secrets.CUMULUSCI_ORG_packaging }}
+    CUMULUSCI_SERVICE_github: ${{ secrets.CUMULUSCI_SERVICE_github }}
+    GITHUB_APP_ID: 129383
+    GITHUB_APP_KEY: ${{ secrets.CCITEST_APP_KEY }}
+    SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }}
+    SFDX_HUB_KEY: ${{ secrets.SFDX_HUB_KEY }}
+    SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }}
+    SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }}
+
+jobs:
+    test_release:
+        name: "Test SFDX CLI"
+        runs-on: SFDO-Tooling-Ubuntu
+        concurrency: release
+        steps:
+            - uses: actions/checkout@v4
+            - name: Set up Python 3.11
+              uses: actions/setup-python@v5
+              with:
+                  python-version: 3.11
+                  cache: pip
+                  cache-dependency-path: "pyproject.toml"
+            - name: Set up uv
+              uses: SFDO-Tooling/setup-uv@main
+              with:
+                  version: "0.5.0"
+                  enable-cache: true
+            - name: Install Python dependencies
+              run: uv sync
+            - name: Install Salesforce CLI
+              env:
+                  CHANNEL: ${{ inputs.sfdx-release-channel }}
+              run: |
+                  mkdir sfdx
+                  wget -qO- https://developer.salesforce.com/media/salesforce-cli/sf/channels/$CHANNEL/sf-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1
+                  echo $(realpath sfdx/bin) >> $GITHUB_PATH
+            - name: Authenticate Dev Hub
+              run: |
+                  sf plugins --core
+                  echo $SFDX_HUB_KEY_BASE64 | base64 --decode > sfdx.key
+                  sf org login jwt --client-id $SFDX_CLIENT_ID --jwt-key-file sfdx.key --username $SFDX_HUB_USERNAME --set-default-dev-hub --alias hub
+            - name: Check out CumulusCI-Test
+              run: |
+                  git clone https://github.com/SFDO-Tooling/CumulusCI-Test
+            - name: Run ci_feature flow
+              run: |
+                  cd CumulusCI-Test
+                  uv run cci flow run ci_feature --org scratch --delete-org
+            - name: Run ci_beta flow
+              run: |
+                  cd CumulusCI-Test
+                  uv run cci flow run ci_beta --org scratch --delete-org
+            - name: Run ci_master flow
+              run: |
+                  cd CumulusCI-Test
+                  uv run cci flow run ci_master --org scratch --delete-org
+            - name: Run release_beta flow
+              run: |
+                  export SFDX_HUB_KEY="$(echo $SFDX_HUB_KEY_BASE64 | base64 --decode)"
+                  cd CumulusCI-Test
+                  uv run cci flow run release_beta --org packaging
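The "Authenticate Dev Hub" step above decodes a base64-encoded private key to disk and performs a JWT login. A Python sketch of those two commands (assumes the `sf` CLI is on PATH and the same `SFDX_*` environment variables are set):

# Sketch of the "Authenticate Dev Hub" step above, in Python.
import base64
import os
import subprocess

key_path = "sfdx.key"
with open(key_path, "wb") as f:
    # SFDX_HUB_KEY_BASE64 holds the base64-encoded JWT signing key.
    f.write(base64.b64decode(os.environ["SFDX_HUB_KEY_BASE64"]))

subprocess.run(
    [
        "sf", "org", "login", "jwt",
        "--client-id", os.environ["SFDX_CLIENT_ID"],
        "--jwt-key-file", key_path,
        "--username", os.environ["SFDX_HUB_USERNAME"],
        "--set-default-dev-hub",
        "--alias", "hub",
    ],
    check=True,
)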
diff --git a/.github/workflows/slow_integration_tests.yml b/.github/workflows/slow_integration_tests.yml
index 913befe1bb..73c27c5767 100644
--- a/.github/workflows/slow_integration_tests.yml
+++ b/.github/workflows/slow_integration_tests.yml
@@ -22,29 +22,32 @@ env:
 jobs:
     org_backed_tests:
         name: "Org-connected Tests"
-        runs-on: sfdc-ubuntu-latest
+        runs-on: SFDO-Tooling-Ubuntu
         steps:
             - uses: actions/checkout@v2
-            - name: Set up Python 3.8
+            - name: Set up Python 3.11
               uses: actions/setup-python@v4
               with:
-                  python-version: 3.8
+                  python-version: 3.11
                   cache: pip
-                  cache-dependency-path: "requirements/*.txt"
-            - name: Install Python dependencies
-              run: |
-                  python -m pip install -U pip
-                  pip install -r requirements_dev.txt
-            - name: Install sfdx
+                  cache-dependency-path: "pyproject.toml"
+            - name: Set up uv
+              uses: SFDO-Tooling/setup-uv@main
+              with:
+                  version: "0.5.0"
+                  enable-cache: true
+            - name: Install dependencies
+              run: uv sync -p 3.11
+            - name: Install Salesforce CLI
              run: |
                   mkdir sfdx
-                  wget -qO- https://developer.salesforce.com/media/salesforce-cli/sfdx/channels/stable/sfdx-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1
+                  wget -qO- https://developer.salesforce.com/media/salesforce-cli/sf/channels/stable/sf-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1
                   echo $(realpath sfdx/bin) >> $GITHUB_PATH
             - name: Authenticate Dev Hub
               run: |
-                  sfdx plugins --core
+                  sf plugins --core
                   echo $SFDX_HUB_KEY_BASE64 | base64 --decode > sfdx.key
-                  sfdx auth:jwt:grant --clientid $SFDX_CLIENT_ID --jwtkeyfile sfdx.key --username $SFDX_HUB_USERNAME --setdefaultdevhubusername -a hub
+                  sf org login jwt --client-id $SFDX_CLIENT_ID --jwt-key-file sfdx.key --username $SFDX_HUB_USERNAME --set-default-dev-hub --alias hub
               env:
                   SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }}
                   SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }}
@@ -54,10 +57,10 @@ jobs:
             - name: Delete scratch org
               if: always()
               run: |
-                  cci org scratch_delete pytest
+                  uv run cci org scratch_delete pytest
 
     robot_ui:
         name: "Robot: ${{ matrix.job-name }}"
-        runs-on: sfdc-ubuntu-latest
+        runs-on: SFDO-Tooling-Ubuntu
         strategy:
             fail-fast: false
             matrix:
@@ -73,33 +76,33 @@ jobs:
                 # org-shape: "prerelease"
         steps:
             - uses: actions/checkout@v2
-            - name: Set up Python 3.8
+            - name: Set up Python 3.11
               uses: actions/setup-python@v4
               with:
-                  python-version: 3.8
+                  python-version: 3.11
                   cache: pip
-                  cache-dependency-path: "requirements/*.txt"
+                  cache-dependency-path: "pyproject.toml"
             - name: Install Python dependencies
-              run: pip install -r requirements_dev.txt
-            - name: Install sfdx
+              run: pip install .
+            - name: Install Salesforce CLI
               run: |
                   mkdir sfdx
-                  wget -qO- https://developer.salesforce.com/media/salesforce-cli/sfdx/channels/stable/sfdx-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1
+                  wget -qO- https://developer.salesforce.com/media/salesforce-cli/sf/channels/stable/sf-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1
                   echo $(realpath sfdx/bin) >> $GITHUB_PATH
             - name: Initialize Browser/Playwright
               run: cci robot install_playwright
             - name: Authenticate Dev Hub
               run: |
-                  sfdx plugins --core
+                  sf plugins --core
                   echo $SFDX_HUB_KEY_BASE64 | base64 --decode > sfdx.key
-                  sfdx auth:jwt:grant --clientid $SFDX_CLIENT_ID --jwtkeyfile sfdx.key --username $SFDX_HUB_USERNAME --setdefaultdevhubusername -a hub
+                  sf org login jwt --client-id $SFDX_CLIENT_ID --jwt-key-file sfdx.key --username $SFDX_HUB_USERNAME --set-default-dev-hub --alias hub
               env:
                   SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }}
                   SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }}
                   SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }}
             - name: Run robot tests
               run: |
-                  coverage run --append $(which cci) task run robot \
+                  cci task run robot \
                       --org ${{ matrix.org-shape }} \
                       -o suites cumulusci/robotframework/tests/salesforce \
                       -o exclude no-browser \
@@ -110,7 +113,7 @@ jobs:
                   cci org scratch_delete ${{ matrix.org-shape }}
             - name: Store robot results
               if: failure()
-              uses: actions/upload-artifact@v1
+              uses: actions/upload-artifact@v4
               with:
                   name: robot
                   path: robot/CumulusCI/results
diff --git a/.github/workflows/update_dependencies.yml b/.github/workflows/update_dependencies.yml
index 0e83d8400c..9cf9e0474c 100644
--- a/.github/workflows/update_dependencies.yml
+++ b/.github/workflows/update_dependencies.yml
@@ -7,4 +7,4 @@ jobs:
     update_python_dependencies:
         uses: SFDO-Tooling/.github/.github/workflows/update_python_dependencies.yml@main
         with:
-            python-version: 3.8
+            python-version: 3.11
diff --git a/.prettierignore b/.prettierignore
index e69de29bb2..329331137b 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -0,0 +1 @@
+Test*.yaml
\ No newline at end of file
diff --git a/readthedocs.yml b/.readthedocs.yml
similarity index 59%
rename from readthedocs.yml
rename to .readthedocs.yml
index 041aaf738f..4f0f038758 100644
--- a/readthedocs.yml
+++ b/.readthedocs.yml
@@ -7,9 +7,15 @@ version: 2
 
 # Set the version of Python and other tools you might need
 build:
-    os: ubuntu-20.04
+    os: ubuntu-22.04
     tools:
-        python: "3.9"
+        python: "3.12"
+    commands:
+        - asdf plugin add uv
+        - asdf install uv latest
+        - asdf global uv latest
+        - uv sync --only-group docs --frozen
+        - uv run -m sphinx -T -b html -d docs/_build/doctrees -D language=en docs $READTHEDOCS_OUTPUT/html
 
 # Build documentation in the docs/ directory with Sphinx
 sphinx:
@@ -19,8 +25,3 @@ sphinx:
 formats:
     - pdf
     - epub
-
-# Optionally declare the Python requirements required to build your docs
-python:
-    install:
-        - requirements: requirements_dev.txt
diff --git a/Makefile b/Makefile
index cce895e0c8..19b7ada463 100644
--- a/Makefile
+++ b/Makefile
@@ -64,13 +64,13 @@ coverage: ## check code coverage quickly with the default Python
 	$(BROWSER) htmlcov/index.html
 
 vcr: # remake VCR cassettes and run other integration tests
-	cci org scratch qa pytest
-	cci org scratch_delete pytest
+	uv run cci org scratch qa pytest
+	uv run cci org scratch_delete pytest
 	find . -name \Test*.yaml | xargs rm
-	pytest --org qa --run-slow-tests -rs --replace-vcrs
+	uv run pytest --org qa --run-slow-tests -rs --replace-vcrs
 
 slow_tests: vcr # remake VCR cassettes and run other integration tests
-	cci org scratch_delete pytest
+	uv run cci org scratch_delete pytest
 	pytest integration_tests/ --org pytest -rs
 
 docs: ## generate Sphinx HTML documentation
diff --git a/cumulusci/__about__.py b/cumulusci/__about__.py
index 5bdb2805dd..76ad18b89a 100644
--- a/cumulusci/__about__.py
+++ b/cumulusci/__about__.py
@@ -1 +1 @@
-__version__ = "3.84.3"
+__version__ = "4.0.1"
diff --git a/cumulusci/cli/tests/test_error.py b/cumulusci/cli/tests/test_error.py
index 62dd2825e8..094a767896 100644
--- a/cumulusci/cli/tests/test_error.py
+++ b/cumulusci/cli/tests/test_error.py
@@ -98,6 +98,7 @@ def test_error_gist(
     )
     webbrowser_open.assert_called_once_with(expected_gist_url)
 
+@pytest.mark.skipif(sys.version_info > (3, 11), reason="requires python3.11 or lower")
 @mock.patch("cumulusci.cli.error.platform")
 @mock.patch("cumulusci.cli.error.sys")
 @mock.patch("cumulusci.cli.error.datetime")
diff --git a/cumulusci/cli/tests/test_org.py b/cumulusci/cli/tests/test_org.py
index dfa7874c9e..c85f2e0507 100644
--- a/cumulusci/cli/tests/test_org.py
+++ b/cumulusci/cli/tests/test_org.py
@@ -730,7 +730,6 @@ def test_org_list(self, cli_tbl):
             ],
             title="Connected Orgs",
         )
-        assert scratch_table_call in cli_tbl.call_args_list
         assert connected_table_call in cli_tbl.call_args_list
         runtime.keychain.cleanup_org_cache_dirs.assert_called_once()
diff --git a/cumulusci/core/config/org_config.py b/cumulusci/core/config/org_config.py
index b3f191d172..e179fbbe3b 100644
--- a/cumulusci/core/config/org_config.py
+++ b/cumulusci/core/config/org_config.py
@@ -3,6 +3,7 @@
 from collections import defaultdict, namedtuple
 from contextlib import contextmanager
 from datetime import date, datetime
+from typing import Optional
 from urllib.parse import urlparse
 
 import requests
@@ -47,14 +48,12 @@ class OrgConfig(BaseConfig):
     is_sandbox: bool
     namespace: str
     namespaced: bool
-    org_id: str
     org_type: str
     password: str
     scratch: bool
     scratch_org_type: str
     set_password: bool
     sfdx_alias: str
-    username: str
     userinfo: str
     id: str
     active: bool
@@ -63,8 +62,9 @@ class OrgConfig(BaseConfig):
     refresh_token: str
     client_secret: str
     connected_app: str
+    serialization_format: str
 
-    createable: bool = None
+    createable: Optional[bool] = None
 
     # make sure it can be mocked for tests
     OAuth2Client = OAuth2Client
@@ -204,7 +204,15 @@ def user_id(self):
 
     @property
     def org_id(self):
-        return self.id.split("/")[-2]
+        try:
+            if org_id := self.config.get("org_id"):
+                return org_id
+            elif hasattr(self, "id") and self.id:
+                return self.id.split("/")[-2]
+            else:
+                return None
+        except Exception as e:  # pragma: no cover
+            assert e is None, e
 
     @property
     def username(self):
@@ -254,7 +262,7 @@ def populate_expiration_date(self):
     @property
     def organization_sobject(self):
         """Cached copy of Organization sObject. Does not perform API call."""
-        return self._org_sobject
+        return getattr(self, "_org_sobject", None)
 
     def _fetch_community_info(self):
         """Use the API to re-fetch information about communities"""
@@ -317,7 +325,8 @@ def installed_packages(self):
         To check if a required package is present, call `has_minimum_package_version()`
         with either the namespace or 033 Id of the desired package and its version, in 1.2.3 format.
 
-        Beta version of a package are represented as "1.2.3b5", where 5 is the build number."""
+        Beta version of a package are represented as "1.2.3b5", where 5 is the build number.
+        """
         if self._installed_packages is None:
             isp_result = self.salesforce_client.restful(
                 "tooling/query/?q=SELECT SubscriberPackage.Id, SubscriberPackage.NamespacePrefix, "
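The reworked `org_id` property above prefers an explicit `org_id` key in the config and falls back to parsing the trailing org ID out of the `id` identity URL. A quick sketch of the fallback behavior (the IDs here are illustrative):

# Illustration of the org_id fallback in the property above.
# The "id" field is an identity URL of the form
# https://login.salesforce.com/id/<org id>/<user id>,
# so splitting on "/" and taking the second-to-last segment yields the org ID.
identity_url = "https://login.salesforce.com/id/00D000000000001EAA/005000000000001AAA"
org_id = identity_url.split("/")[-2]
assert org_id == "00D000000000001EAA"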
diff --git a/cumulusci/core/config/scratch_org_config.py b/cumulusci/core/config/scratch_org_config.py
index edd0c18807..d79f7fcb66 100644
--- a/cumulusci/core/config/scratch_org_config.py
+++ b/cumulusci/core/config/scratch_org_config.py
@@ -61,7 +61,7 @@ def days_alive(self) -> Optional[int]:
         return delta.days + 1
 
     def create_org(self) -> None:
-        """Uses sfdx force:org:create to create the org"""
+        """Uses sf org create scratch to create the org"""
         if not self.config_file:
             raise ScratchOrgException(
                 f"Scratch org config {self.name} is missing a config_file"
@@ -72,7 +72,7 @@ def create_org(self) -> None:
         args: List[str] = self._build_org_create_args()
         extra_args = os.environ.get("SFDX_ORG_CREATE_ARGS", "")
         p: sarge.Command = sfdx(
-            f"force:org:create --json {extra_args}",
+            f"org create scratch --json {extra_args}",
             args=args,
             username=None,
             log_note="Creating scratch org",
@@ -139,33 +139,32 @@ def _build_org_create_args(self) -> List[str]:
         args = ["-f", self.config_file, "-w", "120"]
         devhub_username: Optional[str] = self._choose_devhub_username()
         if devhub_username:
-            args += ["--targetdevhubusername", devhub_username]
+            args += ["--target-dev-hub", devhub_username]
         if not self.namespaced:
-            args += ["-n"]
+            args += ["--no-namespace"]
         if self.noancestors:
-            args += ["--noancestors"]
+            args += ["--no-ancestors"]
         if self.days:
-            args += ["--durationdays", str(self.days)]
+            args += ["--duration-days", str(self.days)]
         if self.release:
-            args += [f"release={self.release}"]
+            args += [f"--release={self.release}"]
         if self.sfdx_alias:
             args += ["-a", self.sfdx_alias]
         with open(self.config_file, "r") as org_def:
             org_def_data = json.load(org_def)
             org_def_has_email = "adminEmail" in org_def_data
         if self.email_address and not org_def_has_email:
-            args += [f"adminEmail={self.email_address}"]
+            args += [f"--admin-email={self.email_address}"]
         if self.default:
-            args += ["-s"]
-        if instance := self.instance or os.environ.get("SFDX_SIGNUP_INSTANCE"):
-            args += [f"instance={instance}"]
+            args += ["--set-default"]
+
         return args
 
     def _choose_devhub_username(self) -> Optional[str]:
         """Determine which devhub username to specify when calling sfdx, if any."""
         # If a devhub was specified via `cci org scratch`, use it.
         # (This will return None if "devhub" isn't set in the org config,
-        # in which case sfdx will use its defaultdevhubusername.)
+        # in which case sf will use its target-dev-hub.)
         devhub_username = self.devhub
         if not devhub_username and self.keychain is not None:
             # Otherwise see if one is configured via the "devhub" service
@@ -178,7 +177,7 @@ def _choose_devhub_username(self) -> Optional[str]:
         return devhub_username
 
     def generate_password(self) -> None:
-        """Generates an org password with: sfdx force:user:password:generate.
+        """Generates an org password with: sf org generate password.
 
         On a non-zero return code, set the password_failed in our config
         and log the output (stdout/stderr) from sfdx."""
@@ -187,7 +186,7 @@ def generate_password(self) -> None:
             return
 
         p: sarge.Command = sfdx(
-            "force:user:password:generate",
+            "org generate password",
             self.username,
             log_note="Generating scratch org user password",
         )
@@ -214,13 +213,13 @@ def can_delete(self) -> bool:
         return bool(self.date_created)
 
     def delete_org(self) -> None:
-        """Uses sfdx force:org:delete to delete the org"""
+        """Uses sf org delete scratch to delete the org"""
         if not self.created:
             self.logger.info("Skipping org deletion: the scratch org does not exist.")
             return
 
         p: sarge.Command = sfdx(
-            "force:org:delete -p", self.username, "Deleting scratch org"
+            "org delete scratch -p", self.username, "Deleting scratch org"
         )
 
         sfdx_output: List[str] = list(p.stdout_text) + list(p.stderr_text)
"cumulusci.core.config.base_config.STRICT_GETATTR", True - ), pytest.raises(AssertionError): + ), pytest.deprecated_call(), pytest.raises(AssertionError): assert config.foo is None def test_getattr_child_key(self): diff --git a/cumulusci/core/config/tests/test_config_expensive.py b/cumulusci/core/config/tests/test_config_expensive.py index 7c3e879fca..5003d4eceb 100644 --- a/cumulusci/core/config/tests/test_config_expensive.py +++ b/cumulusci/core/config/tests/test_config_expensive.py @@ -376,7 +376,7 @@ def test_get_access_token(self, Command): with mock.patch("cumulusci.core.config.sfdx_org_config.sfdx", sfdx): access_token = config.get_access_token(alias="dadvisor") sfdx.assert_called_once_with( - "force:org:display --targetusername=whatever@example.com --json" + "org display --target-org=whatever@example.com --json" ) assert access_token == "the-token" @@ -792,7 +792,6 @@ def test_build_org_create_args(self, scratch_def_file): "noancestors": True, "sfdx_alias": "project__org", "default": True, - "instance": "NA01", "release": "previous", }, "test", @@ -804,18 +803,17 @@ def test_build_org_create_args(self, scratch_def_file): "tmp.json", "-w", "120", - "--targetdevhubusername", + "--target-dev-hub", "fake@fake.devhub", - "-n", - "--noancestors", - "--durationdays", + "--no-namespace", + "--no-ancestors", + "--duration-days", "1", - "release=previous", + "--release=previous", "-a", "project__org", - "adminEmail=test@example.com", - "-s", - "instance=NA01", + "--admin-email=test@example.com", + "--set-default", ] def test_build_org_create_args__email_in_scratch_def(self): diff --git a/cumulusci/core/dependencies/dependencies.py b/cumulusci/core/dependencies/dependencies.py index 7a08dbdc6f..3301cdb8a5 100644 --- a/cumulusci/core/dependencies/dependencies.py +++ b/cumulusci/core/dependencies/dependencies.py @@ -544,6 +544,7 @@ class UnmanagedDependency(StaticDependency, abc.ABC): subfolder: Optional[str] = None namespace_inject: Optional[str] = None namespace_strip: Optional[str] = None + collision_check: Optional[bool] = None def _get_unmanaged(self, org: OrgConfig): if self.unmanaged is None: diff --git a/cumulusci/core/dependencies/tests/test_dependencies.py b/cumulusci/core/dependencies/tests/test_dependencies.py index c3eb329d8f..e0c757c7aa 100644 --- a/cumulusci/core/dependencies/tests/test_dependencies.py +++ b/cumulusci/core/dependencies/tests/test_dependencies.py @@ -645,6 +645,7 @@ def test_install(self, api_deploy_mock, zip_builder_mock, download_mock): assert mock_task.project_config == context api_deploy_mock.return_value.assert_called_once() + zf.close() def test_get_unmanaged(self): org = mock.Mock() @@ -733,6 +734,7 @@ def test_install(self, api_deploy_mock, zip_builder_mock, download_mock): assert mock_task.project_config == context api_deploy_mock.return_value.assert_called_once() + zf.close() def test_get_unmanaged(self): org = mock.Mock() @@ -793,6 +795,7 @@ def test_get_metadata_package_zip_builder__mdapi_root( }, context=mock.ANY, ) + zf.close() @mock.patch("cumulusci.core.dependencies.dependencies.MetadataPackageZipBuilder") @mock.patch("cumulusci.core.dependencies.dependencies.download_extract_zip") @@ -827,6 +830,7 @@ def test_get_metadata_package_zip_builder__mdapi_subfolder( }, context=mock.ANY, ) + zf.close() @mock.patch("cumulusci.core.dependencies.dependencies.MetadataPackageZipBuilder") @mock.patch("cumulusci.core.dependencies.dependencies.download_extract_zip") @@ -861,11 +865,12 @@ def test_get_metadata_package_zip_builder__sfdx( context=mock.ANY, ) 
sfdx_mock.assert_called_once_with( - "force:source:convert", + "project convert source", args=["-d", mock.ANY, "-r", "force-app"], capture_output=True, check_return=True, ) + zf.close() class TestParseDependency: @@ -903,3 +908,13 @@ def test_parse_unmanaged_dependency(self): } ) assert isinstance(u, UnmanagedZipURLDependency) + + u = parse_dependency( + { + "github": "https://github.com/Test/TestRepo", + "ref": "aaaaaaaa", + "collision_check": False, + "namespace_inject": "ns", + } + ) + assert isinstance(u, UnmanagedGitHubRefDependency) diff --git a/cumulusci/core/keychain/base_project_keychain.py b/cumulusci/core/keychain/base_project_keychain.py index 561caeaec7..0c1d0a6763 100644 --- a/cumulusci/core/keychain/base_project_keychain.py +++ b/cumulusci/core/keychain/base_project_keychain.py @@ -96,11 +96,7 @@ def set_default_org(self, name): org.config["default"] = True org.save() if org.created: - sfdx( - sarge.shell_format( - "force:config:set defaultusername={}", org.sfdx_alias - ) - ) + sfdx(sarge.shell_format("force config set target-org={}", org.sfdx_alias)) def unset_default_org(self): """unset the default orgs for tasks""" @@ -110,7 +106,7 @@ def unset_default_org(self): if org_config.default: del org_config.config["default"] org_config.save() - sfdx("force:config:set defaultusername=") + sfdx("config unset target-org") # This implementation of get_default_org, set_default_org, and unset_default_org # is currently kept for backwards compatibility, but EncryptedFileProjectKeychain diff --git a/cumulusci/core/runtime.py b/cumulusci/core/runtime.py index 51d05a3e16..d5b2ce3459 100644 --- a/cumulusci/core/runtime.py +++ b/cumulusci/core/runtime.py @@ -83,7 +83,8 @@ def _load_project_config(self, *args, **kwargs): self.project_config = self.project_config_cls( self.universal_config, *args, **kwargs ) - self.project_config._add_tasks_directory_to_python_path() + if self.project_config is not None: + self.project_config._add_tasks_directory_to_python_path() def _load_keychain(self): if self.keychain is not None: diff --git a/cumulusci/core/sfdx.py b/cumulusci/core/sfdx.py index d1c8fd01d7..3058fc80ac 100644 --- a/cumulusci/core/sfdx.py +++ b/cumulusci/core/sfdx.py @@ -35,17 +35,17 @@ def sfdx( Returns a `sarge` Command instance with returncode, stdout, stderr """ - command = f"sfdx {command}" + command = f"sf {command}" if args is not None: for arg in args: command += " " + shell_quote(arg) if username: - command += f" -u {shell_quote(username)}" + command += f" -o {shell_quote(username)}" if log_note: logger.info(f"{log_note} with command: {command}") # Avoid logging access token if access_token: - command += f" -u {shell_quote(access_token)}" + command += f" -o {shell_quote(access_token)}" env = env or {} p = sarge.Command( command, @@ -86,15 +86,15 @@ def shell_quote(s: str): def get_default_devhub_username(): p = sfdx( - "force:config:get defaultdevhubusername --json", + "config get target-dev-hub --json", log_note="Getting default Dev Hub username from sfdx", check_return=True, ) result = json.load(p.stdout_text) if "result" not in result or "value" not in result["result"][0]: raise SfdxOrgException( - "No sfdx config found for defaultdevhubusername. " - "Please use the sfdx force:config:set to set the defaultdevhubusername and run again." + "No sf config found for target-dev-hub. " + "Please use the sf config set to set the target-dev-hub and run again." 
diff --git a/cumulusci/core/sfdx.py b/cumulusci/core/sfdx.py
index d1c8fd01d7..3058fc80ac 100644
--- a/cumulusci/core/sfdx.py
+++ b/cumulusci/core/sfdx.py
@@ -35,17 +35,17 @@ def sfdx(
 
     Returns a `sarge` Command instance with returncode, stdout, stderr
     """
-    command = f"sfdx {command}"
+    command = f"sf {command}"
     if args is not None:
         for arg in args:
             command += " " + shell_quote(arg)
     if username:
-        command += f" -u {shell_quote(username)}"
+        command += f" -o {shell_quote(username)}"
     if log_note:
         logger.info(f"{log_note} with command: {command}")
     # Avoid logging access token
     if access_token:
-        command += f" -u {shell_quote(access_token)}"
+        command += f" -o {shell_quote(access_token)}"
     env = env or {}
     p = sarge.Command(
         command,
@@ -86,15 +86,15 @@ def shell_quote(s: str):
 
 def get_default_devhub_username():
     p = sfdx(
-        "force:config:get defaultdevhubusername --json",
+        "config get target-dev-hub --json",
         log_note="Getting default Dev Hub username from sfdx",
         check_return=True,
     )
     result = json.load(p.stdout_text)
     if "result" not in result or "value" not in result["result"][0]:
         raise SfdxOrgException(
-            "No sfdx config found for defaultdevhubusername. "
-            "Please use the sfdx force:config:set to set the defaultdevhubusername and run again."
+            "No sf config found for target-dev-hub. "
+            "Please use `sf config set target-dev-hub` to set it and run again."
         )
     username = result["result"][0]["value"]
     return username
@@ -145,7 +145,7 @@ def convert_sfdx_source(
         if name:
             args += ["-n", name]
         sfdx(
-            "force:source:convert",
+            "project convert source",
             args=args,
             capture_output=True,
             check_return=True,
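To illustrate the wrapper's behavior after this change: `sfdx()` now prefixes every command with `sf` and passes the username via `-o` instead of `-u`. A hedged usage sketch based on the code above (the username is a placeholder; `p` is the `sarge.Command` the docstring describes):

# Usage sketch for the updated sfdx() wrapper above.
from cumulusci.core.sfdx import sfdx

# Builds and runs: sf org display --json -o 'user@example.com'
p = sfdx("org display --json", username="user@example.com", capture_output=True)
if p.returncode == 0:
    print(p.stdout_text.read())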
diff --git a/cumulusci/core/tests/test_datasets_e2e.py b/cumulusci/core/tests/test_datasets_e2e.py
index c5140d3609..387ad696ad 100644
--- a/cumulusci/core/tests/test_datasets_e2e.py
+++ b/cumulusci/core/tests/test_datasets_e2e.py
@@ -304,6 +304,7 @@ def write_yaml(filename: str, json: Any):
                         "after": "Insert Account",
                     }
                 },
+                "select_options": {},
             },
             "Insert Event": {
                 "sf_object": "Event",
@@ -316,16 +317,19 @@ def write_yaml(filename: str, json: Any):
                         "after": "Insert Lead",
                     }
                 },
+                "select_options": {},
             },
             "Insert Account": {
                 "sf_object": "Account",
                 "table": "Account",
                 "fields": ["Name"],
+                "select_options": {},
             },
             "Insert Lead": {
                 "sf_object": "Lead",
                 "table": "Lead",
                 "fields": ["Company", "LastName"],
+                "select_options": {},
             },
         }
         assert tuple(actual.items()) == tuple(expected.items()), actual.items()
diff --git a/cumulusci/core/tests/test_sfdx.py b/cumulusci/core/tests/test_sfdx.py
index 0d6661284e..205996f296 100644
--- a/cumulusci/core/tests/test_sfdx.py
+++ b/cumulusci/core/tests/test_sfdx.py
@@ -23,14 +23,15 @@ class TestSfdx:
     def test_posix_quoting(self, Command):
         sfdx("cmd", args=["a'b"])
         cmd = Command.call_args[0][0]
-        assert cmd == r"sfdx cmd 'a'\''b'"
+        assert cmd == r"sf cmd 'a'\''b'"
 
     @mock.patch("platform.system", mock.Mock(return_value="Windows"))
     @mock.patch("sarge.Command")
     def test_windows_quoting(self, Command):
         sfdx("cmd", args=['a"b'], access_token="token")
         cmd = Command.call_args[0][0]
-        assert cmd == r'sfdx cmd "a\"b" -u token'
+        print(cmd)
+        assert cmd == r'sf cmd "a\"b" -o token'
 
     @mock.patch("platform.system", mock.Mock(return_value="Windows"))
     def test_shell_quote__str_with_space(self):
@@ -93,7 +94,7 @@ def test_convert_sfdx():
     assert p is not None
 
     sfdx.assert_called_once_with(
-        "force:source:convert",
+        "project convert source",
         args=["-d", mock.ANY, "-r", path, "-n", "Test Package"],
         capture_output=True,
         check_return=True,
@@ -109,7 +110,7 @@ def test_convert_sfdx__cwd():
     assert p is not None
 
     sfdx.assert_called_once_with(
-        "force:source:convert",
+        "project convert source",
         args=["-d", mock.ANY, "-n", "Test Package"],
         capture_output=True,
         check_return=True,
diff --git a/cumulusci/cumulusci.yml b/cumulusci/cumulusci.yml
index 6dd783257a..8259a6fa50 100644
--- a/cumulusci/cumulusci.yml
+++ b/cumulusci/cumulusci.yml
@@ -81,6 +81,14 @@ tasks:
         description: Waits on a batch apex or queueable apex job to finish.
         class_path: cumulusci.tasks.apex.batch.BatchApexWait
         group: Salesforce
+    check_components:
+        description: "Check if common components exist in the target org based on provided deploy paths or those from a plan/flow."
+        class_path: cumulusci.tasks.salesforce.check_components.CheckComponents
+        group: Salesforce Preflight Checks
+    check_dataset_load:
+        description: Runs as a preflight check to determine whether a dataset can be loaded successfully.
+        class_path: cumulusci.tasks.preflight.dataset_load.LoadDataSetCheck
+        group: Salesforce Preflight Checks
     check_my_domain_active:
         description: Runs as a preflight check to determine whether My Domain is active.
         class_path: cumulusci.tasks.preflight.settings.CheckMyDomainActive
@@ -263,14 +271,14 @@ tasks:
             path: unpackaged/config/qa
         group: Salesforce Metadata
     dx:
-        description: Execute an arbitrary Salesforce DX command against an org. Use the 'command' option to specify the command, such as 'force:package:install'
+        description: Execute an arbitrary Salesforce DX command against an org. Use the 'command' option to specify the command, such as 'package install'
         class_path: cumulusci.tasks.sfdx.SFDXOrgTask
         group: Salesforce DX
     dx_convert_to:
         description: Converts src directory metadata format into sfdx format under force-app
         class_path: cumulusci.tasks.sfdx.SFDXBaseTask
         options:
-            command: "force:mdapi:convert -r src"
+            command: "project convert mdapi -r src"
         group: Salesforce DX
     dx_convert_from:
         description: Converts force-app directory in sfdx format into metadata format under src
@@ -278,18 +286,6 @@ tasks:
         options:
             src_dir: src
         group: Salesforce DX
-    dx_pull:
-        description: Uses sfdx to pull from a scratch org into the force-app directory
-        class_path: cumulusci.tasks.sfdx.SFDXOrgTask
-        options:
-            command: "force:source:pull"
-        group: Salesforce DX
-    dx_push:
-        description: Uses sfdx to push the force-app directory metadata into a scratch org
-        class_path: cumulusci.tasks.sfdx.SFDXOrgTask
-        options:
-            command: "force:source:push"
-        group: Salesforce DX
     enable_einstein_prediction:
         description: Enable an Einstein Prediction Builder prediction.
         class_path: cumulusci.tasks.salesforce.enable_prediction.EnablePrediction
@@ -411,6 +407,10 @@ tasks:
         description: Prints the Community Templates available to the current org
         class_path: cumulusci.tasks.salesforce.ListCommunityTemplates
         group: Salesforce Communities
+    list_files:
+        description: Display documents that have been uploaded to a library in Salesforce CRM Content or Salesforce Files.
+        class_path: cumulusci.tasks.salesforce.salesforce_files.ListFiles
+        group: Salesforce Metadata
     list_metadata_types:
         description: Prints the metadata types in a project
         class_path: cumulusci.tasks.util.ListMetadataTypes
@@ -539,6 +539,10 @@ tasks:
         group: Data Operations
         options:
             recipe: datasets/recipe.yml
+    retrieve_files:
+        description: Retrieve documents that have been uploaded to a library in Salesforce CRM Content or Salesforce Files.
+        class_path: cumulusci.tasks.salesforce.salesforce_files.RetrieveFiles
+        group: Salesforce Metadata
     revert_managed_src:
         description: Reverts the changes from create_managed_src
         class_path: cumulusci.tasks.metadata.managed_src.RevertManagedSrc
@@ -662,6 +666,10 @@ tasks:
         description: Uploads a beta release of the metadata currently in the packaging org
         class_path: cumulusci.tasks.salesforce.PackageUpload
         group: Release Operations
+    upload_files:
+        description: Upload documents (files) to a Salesforce org.
+        class_path: cumulusci.tasks.salesforce.salesforce_files.UploadFiles
+        group: Salesforce Metadata
     upload_production:
         description: Uploads a production release of the metadata currently in the packaging org
         class_path: cumulusci.tasks.salesforce.PackageUpload
@@ -1480,7 +1488,7 @@ project:
         namespace:
         install_class:
         uninstall_class:
-        api_version: "59.0"
+        api_version: "62.0"
     git:
         default_branch: master
         prefix_feature: feature/
diff --git a/cumulusci/oauth/salesforce.py b/cumulusci/oauth/salesforce.py
index 9063a359d6..48b2e3e0e2 100644
--- a/cumulusci/oauth/salesforce.py
+++ b/cumulusci/oauth/salesforce.py
@@ -21,6 +21,19 @@
 PROD_LOGIN_URL = os.environ.get("SF_PROD_LOGIN_URL") or "https://login.salesforce.com"
 
 
+def update_login_urls():
+    """
+    Re-read the login URL environment variables so they can be set dynamically.
+    """
+    global PROD_LOGIN_URL, SANDBOX_LOGIN_URL
+    PROD_LOGIN_URL = (
+        os.environ.get("SF_PROD_LOGIN_URL") or "https://login.salesforce.com"
+    )
+    SANDBOX_LOGIN_URL = (
+        os.environ.get("SF_SANDBOX_LOGIN_URL") or "https://test.salesforce.com"
+    )
+
+
 def jwt_session(
     client_id, private_key, username, url=None, auth_url=None, is_sandbox=False
 ):
@@ -31,6 +44,7 @@ def jwt_session(
     :param username: Username to authenticate as
     :param url: Org's instance_url
     """
+    update_login_urls()
     if auth_url:
         aud = (
             SANDBOX_LOGIN_URL
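Since `jwt_session` now calls `update_login_urls()` on every invocation, the login endpoints can be redirected per-process via environment variables, which is what the updated `test_salesforce.py` test further below relies on. A small sketch (URLs are illustrative):

# Sketch: redirecting the JWT login endpoints via environment variables.
import os

os.environ["SF_PROD_LOGIN_URL"] = "https://login.test1.salesforce.com"
os.environ["SF_SANDBOX_LOGIN_URL"] = "https://test.test1.salesforce.com"

from cumulusci.oauth import salesforce

# jwt_session() calls update_login_urls() itself; calling it directly here
# just demonstrates the effect on the module-level constants.
salesforce.update_login_urls()
assert salesforce.PROD_LOGIN_URL == "https://login.test1.salesforce.com"
assert salesforce.SANDBOX_LOGIN_URL == "https://test.test1.salesforce.com"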
diff --git a/cumulusci/oauth/tests/test_client.py b/cumulusci/oauth/tests/test_client.py
index 9f0e939cb2..430b18e9ab 100644
--- a/cumulusci/oauth/tests/test_client.py
+++ b/cumulusci/oauth/tests/test_client.py
@@ -13,7 +13,10 @@
 import responses
 from requests.models import Response
 
-from cumulusci.core.exceptions import SalesforceCredentialsException
+from cumulusci.core.exceptions import (
+    CumulusCIUsageError,
+    SalesforceCredentialsException,
+)
 from cumulusci.core.keychain.base_project_keychain import DEFAULT_CONNECTED_APP_PORT
 from cumulusci.oauth.client import (
     PORT_IN_USE_ERR,
@@ -72,9 +75,17 @@ def http_client(client_config):
 
 @contextmanager
 @mock.patch("time.sleep", time.sleep)  # undo mock from conftest
-def httpd_thread(oauth_client):
+def httpd_thread(oauth_client, expected_error=None):
     # call OAuth object on another thread - this spawns local httpd
-    thread = threading.Thread(target=oauth_client.auth_code_flow)
+
+    def run_code_and_check_exception():
+        if expected_error:
+            with pytest.raises(expected_error):
+                oauth_client.auth_code_flow()
+        else:
+            oauth_client.auth_code_flow()
+
+    thread = threading.Thread(target=run_code_and_check_exception)
     thread.start()
     while thread.is_alive():
         if oauth_client.httpd:
@@ -192,7 +203,7 @@ def test_oauth_flow_error_from_auth(self, client):
         )
 
         # call OAuth object on another thread - this spawns local httpd
-        with httpd_thread(client):
+        with httpd_thread(client, OAuth2Error):
             # simulate callback from browser
             with pytest.raises(urllib.error.HTTPError):
                 urllib.request.urlopen(
@@ -204,7 +215,7 @@
         sys.platform.startswith("win"), reason="setup differs from windows"
     )
     def test_create_httpd__port_already_in_use(self, client):
-        with httpd_thread(client):
+        with httpd_thread(client, CumulusCIUsageError):
             with pytest.raises(
                 OAuth2Error, match=PORT_IN_USE_ERR.format(DEFAULT_CONNECTED_APP_PORT)
             ):
@@ -227,7 +238,7 @@ def test_oauth_flow_error_from_token(self, client):
         )
 
         # call OAuth object on another thread - this spawns local httpd
-        with httpd_thread(client):
+        with httpd_thread(client, OAuth2Error):
             # simulate callback from browser
             with pytest.raises(urllib.error.HTTPError):
                 urllib.request.urlopen(client.client_config.redirect_uri + "?code=123")
diff --git a/cumulusci/oauth/tests/test_salesforce.py b/cumulusci/oauth/tests/test_salesforce.py
index 6ef9c11e84..091227900f 100644
--- a/cumulusci/oauth/tests/test_salesforce.py
+++ b/cumulusci/oauth/tests/test_salesforce.py
@@ -1,3 +1,4 @@
+import os
 from unittest import mock
 
 import pytest
@@ -28,10 +29,12 @@ def test_jwt_session(encode):
 @mock.patch("cumulusci.oauth.salesforce.jwt.encode")
 def test_jwt_session__enhanced_domains_enabled(encode):
     # raise an assertion error if the registered url was not accessed
+    os.environ["SF_PROD_LOGIN_URL"] = "https://login.test1.salesforce.com"
+    os.environ["SF_SANDBOX_LOGIN_URL"] = "https://test.test1.salesforce.com"
     with responses.RequestsMock(assert_all_requests_are_fired=True) as rsps:
         rsps.add(
             responses.POST,
-            "https://test.salesforce.com/services/oauth2/token",
+            "https://test.test1.salesforce.com/services/oauth2/token",
             body='{"message":"well done mate!"}',
             status=200,
         )
diff --git a/cumulusci/robotframework/Salesforce.py b/cumulusci/robotframework/Salesforce.py
index 139eae119d..99eebdb0d0 100644
--- a/cumulusci/robotframework/Salesforce.py
+++ b/cumulusci/robotframework/Salesforce.py
@@ -600,11 +600,11 @@ def select_record_type(self, label):
         self.selenium.click_button("Next")
 
     @capture_screenshot_on_error
-    def select_app_launcher_app(self, app_name):
+    def select_app_launcher_app(self, app_name, timeout=30):
         """Navigates to a Salesforce App via the App Launcher"""
         locator = lex_locators["app_launcher"]["app_link"].format(app_name)
         self.open_app_launcher()
-        self.selenium.wait_until_page_contains_element(locator, timeout=30)
+        self.selenium.wait_until_page_contains_element(locator, timeout)
         self.selenium.set_focus_to_element(locator)
         elem = self.selenium.get_webelement(locator)
         link = elem.find_element_by_xpath("../../..")
@@ -623,19 +623,19 @@ def select_app_launcher_tab(self, tab_name):
         self.wait_until_modal_is_closed()
 
     @capture_screenshot_on_error
-    def wait_until_modal_is_open(self):
+    def wait_until_modal_is_open(self, timeout=15):
         """Wait for modal to open"""
         self.selenium.wait_until_page_contains_element(
             lex_locators["modal"]["is_open"],
-            timeout=15,
+            timeout,
             error="Expected to see a modal window, but didn't",
         )
 
     @capture_screenshot_on_error
-    def wait_until_modal_is_closed(self):
+    def wait_until_modal_is_closed(self, timeout=15):
         """Wait for modal to close"""
         self.selenium.wait_until_page_does_not_contain_element(
-            lex_locators["modal"]["is_open"], timeout=15
+            lex_locators["modal"]["is_open"], timeout
         )
 
     @capture_screenshot_on_error
diff --git a/cumulusci/robotframework/form_handlers.py b/cumulusci/robotframework/form_handlers.py
index ceabfcb39c..bc79749057 100644
--- a/cumulusci/robotframework/form_handlers.py
+++ b/cumulusci/robotframework/form_handlers.py
@@ -160,7 +160,12 @@ def clear(self):
 class LightningInputHandler(BaseFormHandler):
     """An input handler for components that can be treated as an input or textarea"""
 
-    tags = ["lightning-input", "lightning-textarea", "lightning-datepicker"]
+    tags = [
+        "lightning-primitive-input-checkbox",
+        "lightning-primitive-input-simple",
+        "lightning-textarea",
+        "lightning-datepicker",
+    ]
 
     def set(self, value):
         self.focus()
diff --git a/cumulusci/tasks/apex/tests/test_apex_tasks.py b/cumulusci/tasks/apex/tests/test_apex_tasks.py
index 7c9079310b..263f5fd265 100644
--- a/cumulusci/tasks/apex/tests/test_apex_tasks.py
+++ b/cumulusci/tasks/apex/tests/test_apex_tasks.py
@@ -8,6 +8,7 @@
 
 import pytest
 import responses
+from responses.matchers import query_string_matcher
 from simple_salesforce import SalesforceGeneralError
 
 from cumulusci.core import exceptions as exc
@@ -73,9 +74,9 @@ def setup_method(self):
 
     def _mock_apex_class_query(self, name="TestClass_TEST", namespace=None):
         namespace_param = "null" if namespace is None else f"%27{namespace}%27"
-        url = (
-            self.base_tooling_url
-            + "query/?q=SELECT+Id%2C+Name+"
+        url = self.base_tooling_url + "query/"
+        query_string = (
+            "q=SELECT+Id%2C+Name+"
             + f"FROM+ApexClass+WHERE+NamespacePrefix+%3D+{namespace_param}"
             + "+AND+%28Name+LIKE+%27%25_TEST%27%29"
         )
@@ -85,7 +86,10 @@ def _mock_apex_class_query(self, name="TestClass_TEST", namespace=None):
             "totalSize": 1,
         }
         responses.add(
-            responses.GET, url, match_querystring=True, json=expected_response
+            responses.GET,
+            url,
+            match=[query_string_matcher(query_string)],
+            json=expected_response,
         )
 
     def _get_mock_test_query_results(self, methodnames, outcomes, messages):
@@ -163,16 +167,14 @@ def _get_mock_test_query_results(self, methodnames, outcomes, messages):
 
     def _get_mock_test_query_url(self, job_id):
         return (
-            self.base_tooling_url
-            + "query/?q=%0ASELECT+Id%2CApexClassId%2CTestTimestamp%2C%0A+++++++Message%2CMethodName%2COutcome%2C%0A+++++++RunTime%2CStackTrace%2C%0A+++++++%28SELECT%0A++++++++++Id%2CCallouts%2CAsyncCalls%2CDmlRows%2CEmail%2C%0A++++++++++LimitContext%2CLimitExceptions%2CMobilePush%2C%0A++++++++++QueryRows%2CSosl%2CCpu%2CDml%2CSoql%0A++++++++FROM+ApexTestResults%29%0AFROM+ApexTestResult%0AWHERE+AsyncApexJobId%3D%27{}%27%0A".format(
-                job_id
-            )
+            self.base_tooling_url + "query/",
+            f"q=%0ASELECT+Id%2CApexClassId%2CTestTimestamp%2C%0A+++++++Message%2CMethodName%2COutcome%2C%0A+++++++RunTime%2CStackTrace%2C%0A+++++++%28SELECT%0A++++++++++Id%2CCallouts%2CAsyncCalls%2CDmlRows%2CEmail%2C%0A++++++++++LimitContext%2CLimitExceptions%2CMobilePush%2C%0A++++++++++QueryRows%2CSosl%2CCpu%2CDml%2CSoql%0A++++++++FROM+ApexTestResults%29%0AFROM+ApexTestResult%0AWHERE+AsyncApexJobId%3D%27{job_id}%27%0A",
         )
 
     def _get_mock_testqueueitem_status_query_url(self, job_id):
         return (
-            self.base_tooling_url
-            + f"query/?q=SELECT+Id%2C+Status%2C+ExtendedStatus%2C+ApexClassId+FROM+ApexTestQueueItem+WHERE+ParentJobId+%3D+%27{job_id}%27+AND+Status+%3D+%27Failed%27"
+            (self.base_tooling_url + "query/"),
+            f"q=SELECT+Id%2C+Status%2C+ExtendedStatus%2C+ApexClassId+FROM+ApexTestQueueItem+WHERE+ParentJobId+%3D+%27{job_id}%27+AND+Status+%3D+%27Failed%27",
         )
 
     def _mock_get_test_results(
@@ -182,44 +184,50 @@ def _mock_get_test_results(
         job_id="JOB_ID1234567",
         methodname=["TestMethod"],
     ):
-        url = self._get_mock_test_query_url(job_id)
+        url, query_string = self._get_mock_test_query_url(job_id)
 
         expected_response = self._get_mock_test_query_results(
             methodname, [outcome], [message]
         )
         responses.add(
-            responses.GET, url, match_querystring=True, json=expected_response
+            responses.GET,
+            url,
+            match=[query_string_matcher(query_string)],
+            json=expected_response,
         )
 
     def _mock_get_test_results_multiple(
         self, method_names, outcomes, messages, job_id="JOB_ID1234567"
     ):
-        url = self._get_mock_test_query_url(job_id)
+        url, query_string = self._get_mock_test_query_url(job_id)
 
         expected_response = self._get_mock_test_query_results(
             method_names, outcomes, messages
         )
         responses.add(
-            responses.GET, url, match_querystring=True, json=expected_response
+            responses.GET,
+            url,
+            match=[query_string_matcher(query_string)],
+            json=expected_response,
        )
 
     def _mock_get_failed_test_classes(self, job_id="JOB_ID1234567"):
-        url = self._get_mock_testqueueitem_status_query_url(job_id)
+        url, query_string = self._get_mock_testqueueitem_status_query_url(job_id)
         responses.add(
             responses.GET,
             url,
-            match_querystring=True,
+            match=[query_string_matcher(query_string)],
             json={"totalSize": 0, "records": [], "done": True},
         )
 
     def _mock_get_failed_test_classes_failure(self, job_id="JOB_ID1234567"):
-        url = self._get_mock_testqueueitem_status_query_url(job_id)
+        url, query_string = self._get_mock_testqueueitem_status_query_url(job_id)
         responses.add(
             responses.GET,
             url,
-            match_querystring=True,
+            match=[query_string_matcher(query_string)],
             json={
                 "totalSize": 1,
                 "records": [
@@ -235,14 +243,15 @@ def _mock_get_failed_test_classes_failure(self, job_id="JOB_ID1234567"):
         )
 
     def _mock_get_symboltable(self):
-        url = (
-            self.base_tooling_url
-            + "query/?q=SELECT+SymbolTable+FROM+ApexClass+WHERE+Name%3D%27TestClass_TEST%27"
+        url = self.base_tooling_url + "query/"
+        query_string = (
+            "q=SELECT+SymbolTable+FROM+ApexClass+WHERE+Name%3D%27TestClass_TEST%27"
         )
 
         responses.add(
             responses.GET,
             url,
+            match=[query_string_matcher(query_string)],
             json={
                 "records": [
                     {
@@ -265,9 +274,9 @@
         responses.add(responses.GET, url, json={"records": []})
 
     def _mock_tests_complete(self, job_id="JOB_ID1234567"):
-        url = (
-            self.base_tooling_url
-            + "query/?q=SELECT+Id%2C+Status%2C+"
+        url = self.base_tooling_url + "query/"
+        query_string = (
+            "q=SELECT+Id%2C+Status%2C+"
             + "ApexClassId+FROM+ApexTestQueueItem+WHERE+ParentJobId+%3D+%27"
             + "{}%27".format(job_id)
         )
@@ -277,15 +286,18 @@ def _mock_tests_complete(self, job_id="JOB_ID1234567"):
             "records": [{"Status": "Completed"}],
         }
         responses.add(
-            responses.GET, url, match_querystring=True, json=expected_response
+            responses.GET,
+            url,
+            match=[query_string_matcher(query_string)],
+            json=expected_response,
         )
 
     def _mock_tests_processing(self, job_id="JOB_ID1234567"):
-        url = (
-            self.base_tooling_url
-            + "query/?q=SELECT+Id%2C+Status%2C+"
+        url = self.base_tooling_url + "query/"
+        query_string = (
+            "q=SELECT+Id%2C+Status%2C+"
             + "ApexClassId+FROM+ApexTestQueueItem+WHERE+ParentJobId+%3D+%27"
-            + "{}%27".format(job_id)
+            + f"{job_id}%27"
         )
         expected_response = {
             "done": True,
@@ -293,7 +305,10 @@ def _mock_tests_processing(self, job_id="JOB_ID1234567"):
             "records": [{"Status": "Processing", "ApexClassId": 1}],
         }
         responses.add(
-            responses.GET, url, match_querystring=True, json=expected_response
+            responses.GET,
+            url,
+            match=[query_string_matcher(query_string)],
+            json=expected_response,
         )
 
     def _mock_run_tests(self, success=True, body="JOB_ID1234567"):
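The changes above migrate off the removed `match_querystring=True` flag of the `responses` library to the explicit `responses.matchers.query_string_matcher`, splitting each mocked URL into a base URL plus a query string. A minimal standalone sketch of the new pattern (the URL is a placeholder):

# Minimal sketch of the responses query_string_matcher pattern used above.
import requests
import responses
from responses.matchers import query_string_matcher

@responses.activate
def test_query_is_matched():
    responses.add(
        responses.GET,
        "https://example.my.salesforce.com/services/data/v62.0/tooling/query/",
        # The matcher compares parsed query strings, so "+" and "%20" both match a space.
        match=[query_string_matcher("q=SELECT+Id+FROM+ApexClass")],
        json={"totalSize": 0, "records": [], "done": True},
    )
    resp = requests.get(
        "https://example.my.salesforce.com/services/data/v62.0/tooling/query/",
        params={"q": "SELECT Id FROM ApexClass"},
    )
    assert resp.json()["done"] is True

test_query_is_matched()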
a/cumulusci/tasks/bulkdata/generate_mapping_utils/tests/test_generate_load_mapping_from_declarations.py +++ b/cumulusci/tasks/bulkdata/generate_mapping_utils/tests/test_generate_load_mapping_from_declarations.py @@ -41,6 +41,7 @@ def test_simple_generate_mapping_from_declarations(self, org_config): "sf_object": "Account", "table": "Account", "fields": ["Name", "Description"], + "select_options": {}, } } @@ -74,11 +75,13 @@ def test_generate_mapping_from_both_kinds_of_declarations(self, org_config): "sf_object": "Contact", "table": "Contact", "fields": ["FirstName", "LastName"], + "select_options": {}, }, "Insert Account": { "sf_object": "Account", "table": "Account", "fields": ["Name", "Description"], + "select_options": {}, }, }.items() ) @@ -111,6 +114,7 @@ def test_generate_load_mapping_from_declarations__lookups(self, org_config): "sf_object": "Account", "table": "Account", "fields": ["Name", "Description"], + "select_options": {}, }, "Insert Contact": { "sf_object": "Contact", @@ -119,6 +123,7 @@ def test_generate_load_mapping_from_declarations__lookups(self, org_config): "lookups": { "AccountId": {"table": ["Account"], "key_field": "AccountId"} }, + "select_options": {}, }, } @@ -157,6 +162,7 @@ def test_generate_load_mapping_from_declarations__polymorphic_lookups( "sf_object": "Account", "table": "Account", "fields": ["Name", "Description"], + "select_options": {}, }, "Insert Contact": { "sf_object": "Contact", @@ -165,11 +171,13 @@ def test_generate_load_mapping_from_declarations__polymorphic_lookups( "lookups": { "AccountId": {"table": ["Account"], "key_field": "AccountId"} }, + "select_options": {}, }, "Insert Lead": { "sf_object": "Lead", "table": "Lead", "fields": ["LastName", "Company"], + "select_options": {}, }, "Insert Event": { "sf_object": "Event", @@ -178,6 +186,7 @@ def test_generate_load_mapping_from_declarations__polymorphic_lookups( "lookups": { "WhoId": {"table": ["Contact", "Lead"], "key_field": "WhoId"} }, + "select_options": {}, }, } @@ -221,6 +230,7 @@ def test_generate_load_mapping_from_declarations__circular_lookups( }, "sf_object": "Account", "table": "Account", + "select_options": {}, }, "Insert Contact": { "sf_object": "Contact", @@ -229,6 +239,7 @@ def test_generate_load_mapping_from_declarations__circular_lookups( "lookups": { "AccountId": {"table": ["Account"], "key_field": "AccountId"} }, + "select_options": {}, }, }, mf @@ -252,11 +263,13 @@ def test_generate_load_mapping__with_load_declarations(self, org_config): "sf_object": "Account", "api": DataApi.REST, "table": "Account", + "select_options": {}, }, "Insert Contact": { "sf_object": "Contact", "api": DataApi.BULK, "table": "Contact", + "select_options": {}, }, }, mf @@ -288,6 +301,7 @@ def test_generate_load_mapping__with_upserts(self, org_config): "Insert Account": { "sf_object": "Account", "table": "Account", + "select_options": {}, }, "Upsert Account Name": { "sf_object": "Account", @@ -295,6 +309,7 @@ def test_generate_load_mapping__with_upserts(self, org_config): "action": DataOperationType.UPSERT, "update_key": ("Name",), "fields": ["Name"], + "select_options": {}, }, "Etl_Upsert Account AccountNumber_Name": { "sf_object": "Account", @@ -302,10 +317,12 @@ def test_generate_load_mapping__with_upserts(self, org_config): "action": DataOperationType.ETL_UPSERT, "update_key": ("AccountNumber", "Name"), "fields": ["AccountNumber", "Name"], + "select_options": {}, }, "Insert Contact": { "sf_object": "Contact", "table": "Contact", + "select_options": {}, }, }, mf diff --git 
a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py index 4ae0dcf31a..0732d57777 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -27,6 +27,7 @@ AddMappingFiltersToQuery, AddPersonAccountsToQuery, AddRecordTypesToQuery, + DynamicLookupQueryExtender, ) from cumulusci.tasks.bulkdata.step import ( DEFAULT_BULK_BATCH_SIZE, @@ -289,7 +290,12 @@ def _execute_step( self, step, self._stream_queried_data(mapping, local_ids, query) ) step.start() - step.load_records(self._stream_queried_data(mapping, local_ids, query)) + if mapping.action == DataOperationType.SELECT: + step.select_records( + self._stream_queried_data(mapping, local_ids, query) + ) + else: + step.load_records(self._stream_queried_data(mapping, local_ids, query)) step.end() # Process Job Results @@ -304,10 +310,108 @@ def _execute_step( return step.job_result + def process_lookup_fields(self, mapping, fields, polymorphic_fields): + """Modify fields and priority fields based on lookup and polymorphic checks.""" + # Store the lookups and their original order for re-insertion at the end + original_lookups = [name for name in fields if name in mapping.lookups] + max_insert_index = -1 + for name, lookup in mapping.lookups.items(): + if name in fields: + # Get the index of the lookup field before removing it + insert_index = fields.index(name) + max_insert_index = max(max_insert_index, insert_index) + # Remove the lookup field from fields + fields.remove(name) + + # Do the same for priority fields + lookup_in_priority_fields = False + if name in mapping.select_options.priority_fields: + # Set flag to True + lookup_in_priority_fields = True + # Remove the lookup field from priority fields + del mapping.select_options.priority_fields[name] + + # Check if this lookup field is polymorphic + if ( + name in polymorphic_fields + and len(polymorphic_fields[name]["referenceTo"]) > 1 + ): + # Convert to list if string + if not isinstance(lookup.table, list): + lookup.table = [lookup.table] + # Polymorphic field handling + polymorphic_references = lookup.table + relationship_name = polymorphic_fields[name]["relationshipName"] + + # Loop through each polymorphic type (e.g., Contact, Lead) + for ref_type in polymorphic_references: + # Find the mapping step for this polymorphic type + lookup_mapping_step = next( + ( + step + for step in self.mapping.values() + if step.table == ref_type + ), + None, + ) + if lookup_mapping_step: + lookup_fields = lookup_mapping_step.fields.keys() + # Insert fields in the format {relationship_name}.{ref_type}.{lookup_field} + for field in lookup_fields: + fields.insert( + insert_index, + f"{relationship_name}.{lookup_mapping_step.sf_object}.{field}", + ) + insert_index += 1 + max_insert_index = max(max_insert_index, insert_index) + if lookup_in_priority_fields: + mapping.select_options.priority_fields[ + f"{relationship_name}.{lookup_mapping_step.sf_object}.{field}" + ] = f"{relationship_name}.{lookup_mapping_step.sf_object}.{field}" + + else: + # Non-polymorphic field handling + lookup_table = lookup.table + + if isinstance(lookup_table, list): + lookup_table = lookup_table[0] + + # Get the mapping step for the non-polymorphic reference + lookup_mapping_step = next( + ( + step + for step in self.mapping.values() + if step.table == lookup_table + ), + None, + ) + + if lookup_mapping_step: + relationship_name = polymorphic_fields[name]["relationshipName"] + lookup_fields = lookup_mapping_step.fields.keys() + + # Insert the new fields at the same position as the 
removed lookup field
+                    for field in lookup_fields:
+                        fields.insert(insert_index, f"{relationship_name}.{field}")
+                        insert_index += 1
+                        max_insert_index = max(max_insert_index, insert_index)
+                        if lookup_in_priority_fields:
+                            mapping.select_options.priority_fields[
+                                f"{relationship_name}.{field}"
+                            ] = f"{relationship_name}.{field}"
+
+        # Append the original lookups at the end in the same order
+        for name in original_lookups:
+            if name not in fields:
+                fields.insert(max_insert_index, name)
+                max_insert_index += 1
+
     def configure_step(self, mapping):
         """Create a step appropriate to the action"""
         bulk_mode = mapping.bulk_mode or self.bulk_mode or "Parallel"
         api_options = {"batch_size": mapping.batch_size, "bulk_mode": bulk_mode}
+        num_records_in_target = None
+        content_type = None

         fields = mapping.get_load_field_list()

@@ -336,11 +440,45 @@ def configure_step(self, mapping):
             self.check_simple_upsert(mapping)
             api_options["update_key"] = mapping.update_key[0]
             action = DataOperationType.UPSERT
+        elif mapping.action == DataOperationType.SELECT:
+            # Set content type to json
+            content_type = "JSON"
+            # Bulk process expects DataOperationType to be QUERY
+            action = DataOperationType.QUERY
+            # Determine number of records in the target org
+            record_count_response = self.sf.restful(
+                f"limits/recordCount?sObjects={mapping.sf_object}"
+            )
+            sobject_map = {
+                entry["name"]: entry["count"]
+                for entry in record_count_response["sObjects"]
+            }
+            num_records_in_target = sobject_map.get(mapping.sf_object, None)
+
+            # Check for similarity selection strategy and modify fields accordingly
+            if mapping.select_options.strategy == "similarity":
+                # Describe the object to determine polymorphic lookups
+                describe_result = self.sf.restful(
+                    f"sobjects/{mapping.sf_object}/describe"
+                )
+                polymorphic_fields = {
+                    field["name"]: field
+                    for field in describe_result["fields"]
+                    if field["type"] == "reference"
+                }
+                self.process_lookup_fields(mapping, fields, polymorphic_fields)
         else:
             action = mapping.action
         query = self._query_db(mapping)
+        # Set volume
+        volume = (
+            num_records_in_target
+            if num_records_in_target is not None
+            else query.count()
+        )
+
         step = get_dml_operation(
             sobject=mapping.sf_object,
             operation=action,
@@ -348,7 +486,12 @@ def configure_step(self, mapping):
             context=self,
             fields=fields,
             api=mapping.api,
-            volume=query.count(),
+            volume=volume,
+            selection_strategy=mapping.select_options.strategy,
+            selection_filter=mapping.select_options.filter,
+            selection_priority_fields=mapping.select_options.priority_fields,
+            content_type=content_type,
+            threshold=mapping.select_options.threshold,
         )
         return step, query

@@ -448,9 +591,20 @@ def _query_db(self, mapping):
             AddMappingFiltersToQuery,
             AddUpsertsToQuery,
         ]
-        transformers = [
+        transformers = []
+        if (
+            mapping.action == DataOperationType.SELECT
+            and mapping.select_options.strategy == "similarity"
+        ):
+            transformers.append(
+                DynamicLookupQueryExtender(
+                    mapping, self.mapping, self.metadata, model, self._old_format
+                )
+            )
+        transformers.append(
             AddLookupsToQuery(mapping, self.metadata, model, self._old_format)
-        ]
+        )
+
+        transformers.extend([cls(mapping, self.metadata, model) for cls in classes])

         if mapping.sf_object == "Contact" and self._can_load_person_accounts(mapping):
@@ -481,10 +635,11 @@ def _process_job_results(self, mapping, step, local_ids):
         """Get the job results and process the results.
If we're raising for row-level errors, do so; if we're inserting, store the new Ids.""" - is_insert_or_upsert = mapping.action in ( + is_insert_upsert_or_select = mapping.action in ( DataOperationType.INSERT, DataOperationType.UPSERT, DataOperationType.ETL_UPSERT, + DataOperationType.SELECT, ) conn = self.session.connection() @@ -500,7 +655,7 @@ def _process_job_results(self, mapping, step, local_ids): break # If we know we have no successful inserts, don't attempt to persist Ids. # Do, however, drain the generator to get error-checking behavior. - if is_insert_or_upsert and ( + if is_insert_upsert_or_select and ( step.job_result.records_processed - step.job_result.total_row_errors ): table = self.metadata.tables[self.ID_TABLE_NAME] @@ -516,7 +671,7 @@ def _process_job_results(self, mapping, step, local_ids): # person account Contact records so lookups to # person account Contact records get populated downstream as expected. if ( - is_insert_or_upsert + is_insert_upsert_or_select and mapping.sf_object == "Contact" and self._can_load_person_accounts(mapping) ): @@ -531,7 +686,7 @@ def _process_job_results(self, mapping, step, local_ids): ), ) - if is_insert_or_upsert: + if is_insert_upsert_or_select: self.session.commit() def _generate_results_id_map(self, step, local_ids): diff --git a/cumulusci/tasks/bulkdata/mapping_parser.py b/cumulusci/tasks/bulkdata/mapping_parser.py index fd390a1a23..59c7d630a2 100644 --- a/cumulusci/tasks/bulkdata/mapping_parser.py +++ b/cumulusci/tasks/bulkdata/mapping_parser.py @@ -8,33 +8,21 @@ from typing import IO, Any, Callable, Dict, List, Mapping, Optional, Tuple, Union from pydantic import Field, ValidationError, root_validator, validator -from requests.structures import CaseInsensitiveDict as RequestsCaseInsensitiveDict from simple_salesforce import Salesforce from typing_extensions import Literal from cumulusci.core.enums import StrEnum from cumulusci.core.exceptions import BulkDataException from cumulusci.tasks.bulkdata.dates import iso_to_date +from cumulusci.tasks.bulkdata.select_utils import SelectOptions, SelectStrategy from cumulusci.tasks.bulkdata.step import DataApi, DataOperationType +from cumulusci.tasks.bulkdata.utils import CaseInsensitiveDict from cumulusci.utils import convert_to_snake_case from cumulusci.utils.yaml.model_parser import CCIDictModel logger = getLogger(__name__) -class CaseInsensitiveDict(RequestsCaseInsensitiveDict): - def __init__(self, *args, **kwargs): - self._canonical_keys = {} - super().__init__(*args, **kwargs) - - def canonical_key(self, name): - return self._canonical_keys[name.lower()] - - def __setitem__(self, key, value): - super().__setitem__(key, value) - self._canonical_keys[key.lower()] = key - - class MappingLookup(CCIDictModel): "Lookup relationship between two tables." 
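+    # Illustrative only: `table` takes one table name, or a list of tables for
+    # a polymorphic lookup. A mapping snippet in YAML (object names as used by
+    # the tests in this change) might look like:
+    #   lookups:
+    #     WhoId:
+    #       table:
+    #         - Contact
+    #         - Lead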
     table: Union[str, List[str]]  # Support for polymorphic lookups
@@ -43,6 +31,7 @@ class MappingLookup(CCIDictModel):
     join_field: Optional[str] = None
     after: Optional[str] = None
     aliased_table: Optional[Any] = None
+    parent_tables: Optional[Any] = None
     name: Optional[str] = None  # populated by parent

     def get_lookup_key_field(self, model=None):
@@ -107,6 +96,9 @@ class MappingStep(CCIDictModel):
     ] = None  # default should come from task options
     anchor_date: Optional[Union[str, date]] = None
     soql_filter: Optional[str] = None  # soql_filter property
+    select_options: Optional[SelectOptions] = Field(
+        default_factory=lambda: SelectOptions(strategy=SelectStrategy.STANDARD)
+    )
     update_key: T.Union[str, T.Tuple[str, ...]] = ()  # only for upserts

     @validator("bulk_mode", "api", "action", pre=True)
@@ -129,6 +121,27 @@ def split_update_key(cls, val):
         ), "`update_key` should be a field name or list of field names."
         assert False, "Should be unreachable"  # pragma: no cover

+    @root_validator
+    def validate_priority_fields(cls, values):
+        select_options = values.get("select_options")
+        fields_ = values.get("fields_", {})
+        lookups = values.get("lookups", {})
+
+        if select_options and select_options.priority_fields:
+            priority_field_names = set(select_options.priority_fields.keys())
+            field_names = set(fields_.keys())
+            lookup_names = set(lookups.keys())
+
+            # Check if all priority fields are present in the fields
+            missing_fields = priority_field_names - field_names
+            missing_fields = missing_fields - lookup_names
+            if missing_fields:
+                raise ValueError(
+                    f"Priority fields {missing_fields} are not present in 'fields' or 'lookups'"
+                )
+
+        return values
+
     def get_oid_as_pk(self):
         """Returns True if using Salesforce Ids as primary keys."""
         return "Id" in self.fields
@@ -478,6 +491,27 @@ def _validate_sobject(

         return True

+    def check_required(self, fields_describe):
+        required_fields = set()
+        for field in fields_describe:
+            defaulted = (
+                fields_describe[field]["defaultValue"] is not None
+                or fields_describe[field]["nillable"]
+                or fields_describe[field]["defaultedOnCreate"]
+            )
+            if fields_describe[field]["createable"] and not defaulted:
+                required_fields.add(field)
+        missing_fields = required_fields.difference(
+            set(self.fields.keys()) | set(self.lookups)
+        )
+        if len(missing_fields) > 0:
+            logger.error(
+                f"One or more required fields are missing for loading on {self.sf_object}: {missing_fields}"
+            )
+            return False
+        else:
+            return True
+
     def validate_and_inject_namespace(
         self,
         sf: Salesforce,
@@ -485,6 +519,7 @@ def validate_and_inject_namespace(
         operation: DataOperationType,
         inject_namespaces: bool = False,
         drop_missing: bool = False,
+        is_load: bool = False,
     ):
         """Process the schema elements in this step.

@@ -516,7 +551,6 @@ def strip(element: str):
         global_describe = CaseInsensitiveDict(
             {entry["name"]: entry for entry in sf.describe()["sobjects"]}
         )
-
         if not self._validate_sobject(global_describe, inject, strip, operation):
             # Don't attempt to validate field permissions if the object doesn't exist.
             return False
@@ -524,7 +558,6 @@ def strip(element: str):

         # Validate, inject, and drop (if configured) fields.
         # By this point, we know the attribute is valid.
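+        # The describe fetched below feeds both the field/lookup validation
+        # and, when is_load is set, the new check_required() warning pass.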
         describe = self.describe_data(sf)
-
         fields_correct = self._validate_field_dict(
             describe, self.fields, inject, strip, drop_missing, operation
         )
@@ -533,6 +566,10 @@ def strip(element: str):
             describe, self.lookups, inject, strip, drop_missing, operation
         )

+        if is_load:
+            # Show warning logs for unspecified required fields
+            self.check_required(describe)
+
         if not (fields_correct and lookups_correct):
             return False

@@ -649,7 +686,9 @@ def _infer_and_validate_lookups(mapping: Dict, sf: Salesforce):
             if len(target_objects) == 1:
                 # This is a non-polymorphic lookup.
                 target_index = list(sf_objects.values()).index(target_objects[0])
-                if target_index > idx or target_index == idx:
+                if (
+                    target_index > idx or target_index == idx
+                ) and m.action != DataOperationType.SELECT:
                     # This is a non-polymorphic after step.
                     lookup.after = list(mapping.keys())[idx]
                 else:
@@ -687,7 +726,7 @@ def validate_and_inject_mapping(
     should_continue = [
         m.validate_and_inject_namespace(
-            sf, namespace, data_operation, inject_namespaces, drop_missing
+            sf, namespace, data_operation, inject_namespaces, drop_missing, is_load
         )
         for m in mapping.values()
     ]
@@ -696,12 +735,12 @@
         raise BulkDataException(
             "One or more schema or permissions errors blocked the operation.\n"
             "If you would like to attempt the load regardless, you can specify "
-            "'--drop_missing_schema True' on the command."
+            "'--drop_missing_schema True' on the command, and ensure all required fields are included in the mapping file."
         )

     if drop_missing:
         # Drop any steps with sObjects that are not present.
-        for (include, step_name) in zip(should_continue, list(mapping.keys())):
+        for include, step_name in zip(should_continue, list(mapping.keys())):
             if not include:
                 del mapping[step_name]
diff --git a/cumulusci/tasks/bulkdata/query_transformers.py b/cumulusci/tasks/bulkdata/query_transformers.py
index 1618d813e4..181736a4bc 100644
--- a/cumulusci/tasks/bulkdata/query_transformers.py
+++ b/cumulusci/tasks/bulkdata/query_transformers.py
@@ -1,8 +1,9 @@
 import typing as T
 from functools import cached_property

-from sqlalchemy import and_, func, text
+from sqlalchemy import String, and_, func, text
 from sqlalchemy.orm import Query, aliased
+from sqlalchemy.sql import literal_column

 from cumulusci.core.exceptions import BulkDataException

@@ -75,7 +76,7 @@ def join_for_lookup(lookup):
                 return (
                     lookup.aliased_table,
                     lookup.aliased_table.columns.id
-                    == str(lookup.table) + "-" + value_column,
+                    == str(lookup.table) + "-" + func.cast(value_column, String),
                 )
             else:
                 return (
@@ -86,6 +87,81 @@ def join_for_lookup(lookup):
         return [join_for_lookup(lookup) for lookup in self.lookups]
+
+class DynamicLookupQueryExtender(LoadQueryExtender):
+    """Dynamically adds columns and joins for all fields in lookup tables, handling polymorphic lookups"""
+
+    def __init__(
+        self, mapping, all_mappings, metadata, model, _old_format: bool
+    ) -> None:
+        super().__init__(mapping, metadata, model)
+        self._old_format = _old_format
+        self.all_mappings = all_mappings
+        self.lookups = [
+            lookup for lookup in self.mapping.lookups.values() if not lookup.after
+        ]
+
+    @cached_property
+    def columns_to_add(self):
+        """Add all relevant fields from lookup tables directly without CASE, with support for polymorphic lookups."""
+        columns = []
+        for lookup in self.lookups:
+            tables = lookup.table if isinstance(lookup.table, list) else [lookup.table]
+            lookup.parent_tables = [
+                aliased(
+                    self.metadata.tables[table], name=f"{lookup.name}_{table}_alias"
+                )
+                for table in
tables + ] + + for parent_table, table_name in zip(lookup.parent_tables, tables): + # Find the mapping step for this polymorphic type + lookup_mapping_step = next( + ( + step + for step in self.all_mappings.values() + if step.table == table_name + ), + None, + ) + if lookup_mapping_step: + load_fields = lookup_mapping_step.fields.keys() + for field in load_fields: + if field in lookup_mapping_step.fields: + matching_column = next( + ( + col + for col in parent_table.columns + if col.name == lookup_mapping_step.fields[field] + ) + ) + columns.append( + matching_column.label(f"{parent_table.name}_{field}") + ) + else: + # Append an empty string if the field is not present + columns.append( + literal_column("''").label( + f"{parent_table.name}_{field}" + ) + ) + return columns + + @cached_property + def outerjoins_to_add(self): + """Add outer joins for each lookup table directly, including handling for polymorphic lookups.""" + + def join_for_lookup(lookup, parent_table): + key_field = lookup.get_lookup_key_field(self.model) + value_column = getattr(self.model, key_field) + return (parent_table, parent_table.columns.id == value_column) + + joins = [] + for lookup in self.lookups: + for parent_table in lookup.parent_tables: + joins.append(join_for_lookup(lookup, parent_table)) + return joins + + class AddRecordTypesToQuery(LoadQueryExtender): """Adds columns, joins and filters relatinng to recordtypes""" diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py new file mode 100644 index 0000000000..7412a38ae4 --- /dev/null +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -0,0 +1,769 @@ +import random +import re +import typing as T +from enum import Enum + +import numpy as np +import pandas as pd +from annoy import AnnoyIndex +from pydantic import Field, root_validator, validator +from sklearn.feature_extraction.text import HashingVectorizer +from sklearn.preprocessing import StandardScaler + +from cumulusci.core.enums import StrEnum +from cumulusci.tasks.bulkdata.extract_dataset_utils.hardcoded_default_declarations import ( + DEFAULT_DECLARATIONS, +) +from cumulusci.tasks.bulkdata.utils import CaseInsensitiveDict +from cumulusci.utils.yaml.model_parser import CCIDictModel + + +class SelectStrategy(StrEnum): + """Enum defining the different selection strategies requested.""" + + STANDARD = "standard" + SIMILARITY = "similarity" + RANDOM = "random" + + +class SelectRecordRetrievalMode(StrEnum): + """Enum defining whether you need all records or match the + number of records of the local sql file""" + + ALL = "all" + MATCH = "match" + + +ENUM_VALUES = { + v.value.lower(): v.value + for enum in [SelectStrategy] + for v in enum.__members__.values() +} + + +class SelectOptions(CCIDictModel): + filter: T.Optional[str] = None # Optional filter for selection + strategy: SelectStrategy = SelectStrategy.STANDARD # Strategy for selection + priority_fields: T.Dict[str, str] = Field({}) + threshold: T.Optional[float] = None + + @validator("strategy", pre=True) + def validate_strategy(cls, value): + if isinstance(value, Enum): + return value + + if value: + matched_strategy = ENUM_VALUES.get(value.lower()) + if matched_strategy: + return matched_strategy + + raise ValueError(f"Invalid strategy value: {value}") + + @validator("priority_fields", pre=True) + def standardize_fields_to_dict(cls, values): + if values is None: + values = {} + if type(values) is list: + values = {elem: elem for elem in values} + return CaseInsensitiveDict(values) + + @root_validator + def 
validate_threshold_and_strategy(cls, values): + threshold = values.get("threshold") + strategy = values.get("strategy") + + if threshold is not None: + values["threshold"] = float(threshold) # Convert to float + + if not (0 <= values["threshold"] <= 1): + raise ValueError( + f"Threshold must be between 0 and 1, got {values['threshold']}." + ) + + if strategy != SelectStrategy.SIMILARITY: + raise ValueError( + "If a threshold is specified, the strategy must be set to 'similarity'." + ) + + return values + + +class SelectOperationExecutor: + def __init__(self, strategy: SelectStrategy): + self.strategy = strategy + self.retrieval_mode = ( + SelectRecordRetrievalMode.ALL + if strategy == SelectStrategy.SIMILARITY + else SelectRecordRetrievalMode.MATCH + ) + + def select_generate_query( + self, + sobject: str, + fields: T.List[str], + user_filter: str, + limit: T.Union[int, None], + offset: T.Union[int, None], + ): + _, select_fields = split_and_filter_fields(fields=fields) + # For STANDARD strategy + if self.strategy == SelectStrategy.STANDARD: + return standard_generate_query( + sobject=sobject, user_filter=user_filter, limit=limit, offset=offset + ) + # For SIMILARITY strategy + elif self.strategy == SelectStrategy.SIMILARITY: + return similarity_generate_query( + sobject=sobject, + fields=select_fields, + user_filter=user_filter, + limit=limit, + offset=offset, + ) + # For RANDOM strategy + elif self.strategy == SelectStrategy.RANDOM: + return standard_generate_query( + sobject=sobject, user_filter=user_filter, limit=limit, offset=offset + ) + + def select_post_process( + self, + load_records, + query_records: list, + fields: list, + num_records: int, + sobject: str, + weights: list, + threshold: T.Union[float, None], + ): + # For STANDARD strategy + if self.strategy == SelectStrategy.STANDARD: + return standard_post_process( + query_records=query_records, num_records=num_records, sobject=sobject + ) + # For SIMILARITY strategy + elif self.strategy == SelectStrategy.SIMILARITY: + return similarity_post_process( + load_records=load_records, + query_records=query_records, + fields=fields, + sobject=sobject, + weights=weights, + threshold=threshold, + ) + # For RANDOM strategy + elif self.strategy == SelectStrategy.RANDOM: + return random_post_process( + query_records=query_records, num_records=num_records, sobject=sobject + ) + + +def standard_generate_query( + sobject: str, + user_filter: str, + limit: T.Union[int, None], + offset: T.Union[int, None], +) -> T.Tuple[str, T.List[str]]: + """Generates the SOQL query for the standard (as well as random) selection strategy""" + + query = f"SELECT Id FROM {sobject}" + # If user specifies user_filter + if user_filter: + query += add_limit_offset_to_user_filter( + filter_clause=user_filter, limit_clause=limit, offset_clause=offset + ) + else: + # Get the WHERE clause from DEFAULT_DECLARATIONS if available + declaration = DEFAULT_DECLARATIONS.get(sobject) + if declaration: + query += f" WHERE {declaration.where}" + query += f" LIMIT {limit}" if limit else "" + query += f" OFFSET {offset}" if offset else "" + return query, ["Id"] + + +def standard_post_process( + query_records: list, num_records: int, sobject: str +) -> T.Tuple[T.List[dict], None, T.Union[str, None]]: + """Processes the query results for the standard selection strategy""" + # Handle case where query returns 0 records + if not query_records: + error_message = f"No records found for {sobject} in the target org." 
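+        # All *_post_process helpers share the return shape
+        # (selected_records, insert_records, error_message); the standard and
+        # random strategies never propose inserts, so the middle slot is None.
+        # Worked example, assuming a hypothetical sObject with no
+        # DEFAULT_DECLARATIONS entry:
+        #   standard_generate_query("Foo__c", None, 2, None)
+        #   -> ("SELECT Id FROM Foo__c LIMIT 2", ["Id"])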
+        return [], None, error_message
+
+    # Add 'success: True' to each record to emulate that the records have been inserted
+    selected_records = [
+        {"id": record[0], "success": True, "created": False} for record in query_records
+    ]
+
+    # If fewer records than requested, repeat existing records to match num_records
+    if len(selected_records) < num_records:
+        original_records = selected_records.copy()
+        while len(selected_records) < num_records:
+            selected_records.extend(original_records)
+        selected_records = selected_records[:num_records]
+
+    return selected_records, None, None  # Return selected records and None for error
+
+
+def similarity_generate_query(
+    sobject: str,
+    fields: T.List[str],
+    user_filter: str,
+    limit: T.Union[int, None],
+    offset: T.Union[int, None],
+) -> T.Tuple[str, T.List[str]]:
+    """Generates the SOQL query for the similarity selection strategy, with support for TYPEOF on polymorphic fields."""
+
+    # Pre-process the new fields format to create a nested dict structure for TYPEOF clauses
+    nested_fields = {}
+    regular_fields = []
+
+    for field in fields:
+        components = field.split(".")
+        if len(components) >= 3:
+            # Handle polymorphic fields (format: {relationship_name}.{ref_obj}.{ref_field})
+            relationship, ref_obj, ref_field = (
+                components[0],
+                components[1],
+                components[2],
+            )
+            if relationship not in nested_fields:
+                nested_fields[relationship] = {}
+            if ref_obj not in nested_fields[relationship]:
+                nested_fields[relationship][ref_obj] = []
+            nested_fields[relationship][ref_obj].append(ref_field)
+        else:
+            # Handle regular fields (format: {field})
+            regular_fields.append(field)
+
+    # Construct the query fields
+    query_fields = []
+
+    # Build TYPEOF clauses for polymorphic fields
+    for relationship, references in nested_fields.items():
+        type_clauses = []
+        for ref_obj, ref_fields in references.items():
+            fields_clause = ", ".join(ref_fields)
+            type_clauses.append(f"WHEN {ref_obj} THEN {fields_clause}")
+        type_clause = f"TYPEOF {relationship} {' '.join(type_clauses)} ELSE Id END"
+        query_fields.append(type_clause)
+
+    # Add regular fields to the query
+    query_fields.extend(regular_fields)
+
+    # Ensure "Id" is included in the fields list for identification
+    if "Id" not in query_fields:
+        query_fields.insert(0, "Id")
+
+    # Build the main SOQL query
+    fields_to_query = ", ".join(query_fields)
+    query = f"SELECT {fields_to_query} FROM {sobject}"
+
+    # Add the user-defined filter clause or default clause
+    if user_filter:
+        query += add_limit_offset_to_user_filter(
+            filter_clause=user_filter, limit_clause=limit, offset_clause=offset
+        )
+    else:
+        # Get the WHERE clause from DEFAULT_DECLARATIONS if available
+        declaration = DEFAULT_DECLARATIONS.get(sobject)
+        if declaration:
+            query += f" WHERE {declaration.where}"
+        query += f" LIMIT {limit}" if limit else ""
+        query += f" OFFSET {offset}" if offset else ""
+
+    # Return the original input fields with "Id" added if needed
+    if "Id" not in fields:
+        fields.insert(0, "Id")
+
+    return query, fields
+
+
+def similarity_post_process(
+    load_records,
+    query_records: list,
+    fields: list,
+    sobject: str,
+    weights: list,
+    threshold: T.Union[float, None],
+) -> T.Tuple[
+    T.List[T.Union[dict, None]], T.List[T.Union[list, None]], T.Union[str, None]
+]:
+    """Processes the query results for the similarity selection strategy"""
+    # Handle case where query returns 0 records
+    if not query_records and not threshold:
+        error_message = f"No records found for {sobject} in the target org."
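+        # Note the `and not threshold` guard: when a threshold is configured,
+        # an empty target org is not an error, since every load record can
+        # still fall back to insertion in the levenshtein/annoy paths below.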
+ return [], [], error_message + + load_records = list(load_records) + # Replace None values in each row with empty strings + for idx, row in enumerate(load_records): + row = [value if value is not None else "" for value in row] + load_records[idx] = row + load_record_count, query_record_count = len(load_records), len(query_records) + + complexity_constant = load_record_count * query_record_count + + select_records = [] + insert_records = [] + + if complexity_constant < 1000: + select_records, insert_records = levenshtein_post_process( + load_records, query_records, fields, weights, threshold + ) + else: + select_records, insert_records = annoy_post_process( + load_records, query_records, fields, weights, threshold + ) + + return select_records, insert_records, None + + +def annoy_post_process( + load_records: list, + query_records: list, + all_fields: list, + similarity_weights: list, + threshold: T.Union[float, None], +) -> T.Tuple[T.List[dict], list]: + """Processes the query results for the similarity selection strategy using Annoy algorithm for large number of records""" + selected_records = [] + insertion_candidates = [] + + # Split fields into load and select categories + load_field_list, select_field_list = split_and_filter_fields(fields=all_fields) + # Only select those weights for select field list + similarity_weights = [ + similarity_weights[idx] + for idx, field in enumerate(all_fields) + if field in select_field_list + ] + load_shaped_records = reorder_records( + records=load_records, original_fields=all_fields, new_fields=load_field_list + ) + select_shaped_records = reorder_records( + records=load_records, original_fields=all_fields, new_fields=select_field_list + ) + + if not query_records: + # Directly append to load record for insertion if target_records is empty + selected_records = [None for _ in load_records] + insertion_candidates = load_shaped_records + return selected_records, insertion_candidates + + query_records = replace_empty_strings_with_missing(query_records) + select_shaped_records = replace_empty_strings_with_missing(select_shaped_records) + + hash_features = 100 + num_trees = 10 + + query_record_ids = [record[0] for record in query_records] + query_record_data = [record[1:] for record in query_records] + + record_to_id_map = { + tuple(query_record_data[i]): query_record_ids[i] + for i in range(len(query_records)) + } + + final_load_vectors, final_query_vectors = vectorize_records( + select_shaped_records, + query_record_data, + hash_features=hash_features, + weights=similarity_weights, + ) + + # Create Annoy index for nearest neighbor search + vector_dimension = final_query_vectors.shape[1] + annoy_index = AnnoyIndex(vector_dimension, "euclidean") + + for i in range(len(final_query_vectors)): + annoy_index.add_item(i, final_query_vectors[i]) + + # Build the index + annoy_index.build(num_trees) + + # Find nearest neighbors for each query vector + n_neighbors = 1 + + for i, load_vector in enumerate(final_load_vectors): + # Get nearest neighbors' indices and distances + nearest_neighbors = annoy_index.get_nns_by_vector( + load_vector, n_neighbors, include_distances=True + ) + neighbor_indices = nearest_neighbors[0] # Indices of nearest neighbors + neighbor_distances = [ + distance / 2 for distance in nearest_neighbors[1] + ] # Distances sqrt(2(1-cos(u,v)))/2 lies between [0,1] + + for idx, neighbor_index in enumerate(neighbor_indices): + # Retrieve the corresponding record from the database + record = query_record_data[neighbor_index] + closest_record_id = 
record_to_id_map[tuple(record)] + if threshold and (neighbor_distances[idx] >= threshold): + selected_records.append(None) + insertion_candidates.append(load_shaped_records[i]) + else: + selected_records.append( + {"id": closest_record_id, "success": True, "created": False} + ) + + return selected_records, insertion_candidates + + +def levenshtein_post_process( + source_records: list, + target_records: list, + all_fields: list, + similarity_weights: list, + distance_threshold: T.Union[float, None], +) -> T.Tuple[T.List[T.Optional[dict]], T.List[T.Optional[list]]]: + """Processes query results using Levenshtein algorithm for similarity selection with a small number of records.""" + selected_records = [] + insertion_candidates = [] + + # Split fields into load and select categories + load_field_list, select_field_list = split_and_filter_fields(fields=all_fields) + # Only select those weights for select field list + similarity_weights = [ + similarity_weights[idx] + for idx, field in enumerate(all_fields) + if field in select_field_list + ] + load_shaped_records = reorder_records( + records=source_records, original_fields=all_fields, new_fields=load_field_list + ) + select_shaped_records = reorder_records( + records=source_records, original_fields=all_fields, new_fields=select_field_list + ) + + if not target_records: + # Directly append to load record for insertion if target_records is empty + selected_records = [None for _ in source_records] + insertion_candidates = load_shaped_records + return selected_records, insertion_candidates + + for select_record, load_record in zip(select_shaped_records, load_shaped_records): + closest_match, match_distance = find_closest_record( + select_record, target_records, similarity_weights + ) + + if distance_threshold and match_distance > distance_threshold: + # Append load record for insertion if distance exceeds threshold + insertion_candidates.append(load_record) + selected_records.append(None) + elif closest_match: + # Append match details if distance is within threshold + selected_records.append( + {"id": closest_match[0], "success": True, "created": False} + ) + + return selected_records, insertion_candidates + + +def random_post_process( + query_records: list, num_records: int, sobject: str +) -> T.Tuple[T.List[dict], None, T.Union[str, None]]: + """Processes the query results for the random selection strategy""" + + if not query_records: + error_message = f"No records found for {sobject} in the target org." 
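+        # When records do exist, random.choice below samples with replacement,
+        # so one target record may back several load rows if the org holds
+        # fewer records than the load set.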
+ return [], None, error_message + + selected_records = [] + for _ in range(num_records): # Loop 'num_records' times + # Randomly select one record from query_records + random_record = random.choice(query_records) + selected_records.append( + {"id": random_record[0], "success": True, "created": False} + ) + + return selected_records, None, None + + +def find_closest_record(load_record: list, query_records: list, weights: list): + closest_distance = float("inf") + closest_record = query_records[0] + + for record in query_records: + distance = calculate_levenshtein_distance(load_record, record[1:], weights) + if distance < closest_distance: + closest_distance = distance + closest_record = record + + return closest_record, closest_distance + + +def levenshtein_distance(str1: str, str2: str): + """Calculate the Levenshtein distance between two strings""" + len_str1 = len(str1) + 1 + len_str2 = len(str2) + 1 + + dp = [[0 for _ in range(len_str2)] for _ in range(len_str1)] + + for i in range(len_str1): + dp[i][0] = i + for j in range(len_str2): + dp[0][j] = j + + for i in range(1, len_str1): + for j in range(1, len_str2): + cost = 0 if str1[i - 1] == str2[j - 1] else 1 + dp[i][j] = min( + dp[i - 1][j] + 1, # Deletion + dp[i][j - 1] + 1, # Insertion + dp[i - 1][j - 1] + cost, + ) # Substitution + + return dp[-1][-1] + + +def calculate_levenshtein_distance(record1: list, record2: list, weights: list): + if len(record1) != len(record2): + raise ValueError("Records must have the same number of fields.") + elif len(record1) != len(weights): + raise ValueError("Records must be same size as fields (weights).") + + total_distance = 0 + + for field1, field2, weight in zip(record1, record2, weights): + field1 = field1.lower() + field2 = field2.lower() + + if len(field1) == 0 and len(field2) == 0: + # If both fields are blank, distance is 0 + distance = 0 + else: + # Average distance per character + distance = levenshtein_distance(field1, field2) / max( + len(field1), len(field2) + ) + if len(field1) == 0 or len(field2) == 0: + # If one field is blank, reduce the impact of the distance + distance = distance * 0.05 # Fixed value for blank vs non-blank + + # Multiply the distance by the corresponding weight + total_distance += distance * weight + + # Average distance per character with weights + return total_distance / sum(weights) if len(weights) else 0 + + +def add_limit_offset_to_user_filter( + filter_clause: str, + limit_clause: T.Union[float, None] = None, + offset_clause: T.Union[float, None] = None, +) -> str: + + # Extract existing LIMIT and OFFSET from filter_clause if present + existing_limit_match = re.search(r"LIMIT\s+(\d+)", filter_clause, re.IGNORECASE) + existing_offset_match = re.search(r"OFFSET\s+(\d+)", filter_clause, re.IGNORECASE) + + if existing_limit_match: + existing_limit = int(existing_limit_match.group(1)) + if limit_clause is not None: # Only apply limit_clause if it's provided + limit_clause = min(existing_limit, limit_clause) + else: + limit_clause = existing_limit + + if existing_offset_match: + existing_offset = int(existing_offset_match.group(1)) + if offset_clause is not None: + offset_clause = existing_offset + offset_clause + else: + offset_clause = existing_offset + + # Remove existing LIMIT and OFFSET from filter_clause, handling potential extra spaces + filter_clause = re.sub( + r"\s+OFFSET\s+\d+\s*", " ", filter_clause, flags=re.IGNORECASE + ).strip() + filter_clause = re.sub( + r"\s+LIMIT\s+\d+\s*", " ", filter_clause, flags=re.IGNORECASE + ).strip() + + if 
limit_clause is not None: + filter_clause += f" LIMIT {limit_clause}" + if offset_clause is not None: + filter_clause += f" OFFSET {offset_clause}" + + return f" {filter_clause}" + + +def determine_field_types(df, weights): + numerical_features = [] + boolean_features = [] + categorical_features = [] + + numerical_weights = [] + boolean_weights = [] + categorical_weights = [] + + for col, weight in zip(df.columns, weights): + # Check if the column can be converted to numeric + try: + # Attempt to convert to numeric + df[col] = pd.to_numeric(df[col], errors="raise") + numerical_features.append(col) + numerical_weights.append(weight) + except ValueError: + # Check for boolean values + if df[col].str.lower().isin(["true", "false"]).all(): + # Map to actual boolean values + df[col] = df[col].str.lower().map({"true": True, "false": False}) + boolean_features.append(col) + boolean_weights.append(weight) + else: + categorical_features.append(col) + categorical_weights.append(weight) + + return ( + numerical_features, + boolean_features, + categorical_features, + numerical_weights, + boolean_weights, + categorical_weights, + ) + + +def vectorize_records(db_records, query_records, hash_features, weights): + # Convert database records and query records to DataFrames + df_db = pd.DataFrame(db_records) + df_query = pd.DataFrame(query_records) + + # Determine field types and corresponding weights + # Modifies boolean columns to True or False + ( + numerical_features, + boolean_features, + categorical_features, + numerical_weights, + boolean_weights, + categorical_weights, + ) = determine_field_types(df_db, weights) + + # Modify query dataframe boolean columns to True or False + for col in df_query.columns: + if df_query[col].str.lower().isin(["true", "false"]).all(): + df_query[col] = ( + df_query[col].str.lower().map({"true": True, "false": False}) + ) + + # Fit StandardScaler on the numerical features of the database records + scaler = StandardScaler() + if numerical_features: + df_db[numerical_features] = scaler.fit_transform(df_db[numerical_features]) + df_query[numerical_features] = scaler.transform(df_query[numerical_features]) + + # Use HashingVectorizer to transform the categorical features + hashing_vectorizer = HashingVectorizer( + n_features=hash_features, alternate_sign=False + ) + + # For db_records + hashed_categorical_data_db = [] + for idx, col in enumerate(categorical_features): + hashed_db = hashing_vectorizer.fit_transform(df_db[col]).toarray() + # Apply weight to the hashed vector for this categorical feature + hashed_db_weighted = hashed_db * categorical_weights[idx] + hashed_categorical_data_db.append(hashed_db_weighted) + + # For query_records + hashed_categorical_data_query = [] + for idx, col in enumerate(categorical_features): + hashed_query = hashing_vectorizer.transform(df_query[col]).toarray() + # Apply weight to the hashed vector for this categorical feature + hashed_query_weighted = hashed_query * categorical_weights[idx] + hashed_categorical_data_query.append(hashed_query_weighted) + + # Combine all feature types into a single vector for the database records + db_vectors = [] + if numerical_features: + db_vectors.append(df_db[numerical_features].values * numerical_weights) + if boolean_features: + db_vectors.append(df_db[boolean_features].astype(int).values * boolean_weights) + if hashed_categorical_data_db: + db_vectors.append(np.hstack(hashed_categorical_data_db)) + + # Concatenate database vectors + final_db_vectors = np.hstack(db_vectors) + + # Combine all feature 
types into a single vector for the query records + query_vectors = [] + if numerical_features: + query_vectors.append(df_query[numerical_features].values * numerical_weights) + if boolean_features: + query_vectors.append( + df_query[boolean_features].astype(int).values * boolean_weights + ) + if hashed_categorical_data_query: + query_vectors.append(np.hstack(hashed_categorical_data_query)) + + # Concatenate query vectors + final_query_vectors = np.hstack(query_vectors) + + return final_db_vectors, final_query_vectors + + +def replace_empty_strings_with_missing(records): + return [ + [(field if field != "" else "missing") for field in record] + for record in records + ] + + +def split_and_filter_fields(fields: T.List[str]) -> T.Tuple[T.List[str], T.List[str]]: + # List to store non-lookup fields (load fields) + load_fields = [] + + # Set to store unique first components of select fields + unique_components = set() + # Keep track of last flattened lookup index + last_flat_lookup_index = -1 + + # Iterate through the fields + for idx, field in enumerate(fields): + if "." in field: + # Split the field by '.' and add the first component to the set + first_component = field.split(".")[0] + unique_components.add(first_component) + last_flat_lookup_index = max(last_flat_lookup_index, idx) + else: + # Add the field to the load_fields list + load_fields.append(field) + + # Number of unique components + num_unique_components = len(unique_components) + + # Adjust select_fields by removing only the field at last_flat_lookup_index + 1 + if last_flat_lookup_index + 1 < len( + fields + ) and last_flat_lookup_index + num_unique_components < len(fields): + select_fields = ( + fields[: last_flat_lookup_index + 1] + + fields[last_flat_lookup_index + num_unique_components + 1 :] + ) + else: + select_fields = fields + + return load_fields, select_fields + + +# Function to reorder records based on the new field list +def reorder_records(records, original_fields, new_fields): + if not original_fields: + raise KeyError("original_fields should not be empty") + # Map the original field indices + field_index_map = {field: i for i, field in enumerate(original_fields)} + reordered_records = [] + + for record in records: + reordered_records.append( + [ + record[field_index_map[field]] + for field in new_fields + if field in field_index_map + ] + ) + + return reordered_records diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index edcb62afbb..b2a13bf966 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -7,7 +7,8 @@ import time from abc import ABCMeta, abstractmethod from contextlib import contextmanager -from typing import Any, Dict, List, NamedTuple, Optional +from itertools import tee +from typing import Any, Dict, List, NamedTuple, Optional, Union import requests import salesforce_bulk @@ -15,13 +16,21 @@ from cumulusci.core.enums import StrEnum from cumulusci.core.exceptions import BulkDataException from cumulusci.core.utils import process_bool_arg -from cumulusci.tasks.bulkdata.utils import iterate_in_chunks +from cumulusci.tasks.bulkdata.select_utils import ( + SelectOperationExecutor, + SelectRecordRetrievalMode, + SelectStrategy, + split_and_filter_fields, +) +from cumulusci.tasks.bulkdata.utils import DataApi, iterate_in_chunks from cumulusci.utils.classutils import namedtuple_as_simple_dict from cumulusci.utils.xml import lxml_parse_string DEFAULT_BULK_BATCH_SIZE = 10_000 DEFAULT_REST_BATCH_SIZE = 200 MAX_REST_BATCH_SIZE = 200 
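+# Relative weights used when scoring similarity matches; assign_weights()
+# later in this module presumably maps priority fields to the high value and
+# all remaining fields to the low one.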
+HIGH_PRIORITY_VALUE = 3 +LOW_PRIORITY_VALUE = 0.5 csv.field_size_limit(2**27) # 128 MB @@ -36,14 +45,7 @@ class DataOperationType(StrEnum): UPSERT = "upsert" ETL_UPSERT = "etl_upsert" SMART_UPSERT = "smart_upsert" # currently undocumented - - -class DataApi(StrEnum): - """Enum defining requested Salesforce data API for an operation.""" - - BULK = "bulk" - REST = "rest" - SMART = "smart" + SELECT = "select" class DataOperationStatus(StrEnum): @@ -320,6 +322,11 @@ def get_prev_record_values(self, records): """Get the previous records values in case of UPSERT and UPDATE to prepare for rollback""" pass + @abstractmethod + def select_records(self, records): + """Perform the requested DML operation on the supplied row iterator.""" + pass + @abstractmethod def load_records(self, records): """Perform the requested DML operation on the supplied row iterator.""" @@ -338,7 +345,20 @@ def get_results(self): class BulkApiDmlOperation(BaseDmlOperation, BulkJobMixin): """Operation class for all DML operations run using the Bulk API.""" - def __init__(self, *, sobject, operation, api_options, context, fields): + def __init__( + self, + *, + sobject, + operation, + api_options, + context, + fields, + selection_strategy=SelectStrategy.STANDARD, + selection_filter=None, + selection_priority_fields=None, + content_type=None, + threshold=None, + ): super().__init__( sobject=sobject, operation=operation, @@ -353,18 +373,27 @@ def __init__(self, *, sobject, operation, api_options, context, fields): self.csv_buff = io.StringIO(newline="") self.csv_writer = csv.writer(self.csv_buff, quoting=csv.QUOTE_ALL) + self.select_operation_executor = SelectOperationExecutor(selection_strategy) + self.selection_filter = selection_filter + self.weights = assign_weights( + priority_fields=selection_priority_fields, fields=fields + ) + self.content_type = content_type if content_type else "CSV" + self.threshold = threshold + def start(self): self.job_id = self.bulk.create_job( self.sobject, self.operation.value, - contentType="CSV", + contentType=self.content_type, concurrency=self.api_options.get("bulk_mode", "Parallel"), external_id_name=self.api_options.get("update_key"), ) def end(self): self.bulk.close_job(self.job_id) - self.job_result = self._wait_for_job(self.job_id) + if not self.job_result: + self.job_result = self._wait_for_job(self.job_id) def get_prev_record_values(self, records): """Get the previous values of the records based on the update key @@ -424,6 +453,161 @@ def load_records(self, records): self.context.logger.info(f"Uploading batch {count + 1}") self.batch_ids.append(self.bulk.post_batch(self.job_id, iter(csv_batch))) + def select_records(self, records): + """Executes a SOQL query to select records and adds them to results""" + + self.select_results = [] # Store selected records + query_records = [] + # Create a copy of the generator using tee + records, records_copy = tee(records) + # Count total number of records to fetch using the copy + total_num_records = sum(1 for _ in records_copy) + limit_clause = self._determine_limit_clause(total_num_records=total_num_records) + + # Generate and execute SOQL query + # (not passing offset as it is not supported in Bulk) + ( + select_query, + query_fields, + ) = self.select_operation_executor.select_generate_query( + sobject=self.sobject, + fields=self.fields, + user_filter=self.selection_filter if self.selection_filter else None, + limit=limit_clause, + offset=None, + ) + + # Execute the main select query using Bulk API + select_query_records = 
self._execute_select_query( + select_query=select_query, query_fields=query_fields + ) + + query_records.extend(select_query_records) + # Post-process the query results + ( + selected_records, + insert_records, + error_message, + ) = self.select_operation_executor.select_post_process( + load_records=records, + query_records=query_records, + fields=self.fields, + num_records=total_num_records, + sobject=self.sobject, + weights=self.weights, + threshold=self.threshold, + ) + + # Log the number of selected and prepared for insertion records + num_selected = sum(1 for record in selected_records if record) + num_prepared = len(insert_records) if insert_records else 0 + + self.logger.info( + f"{num_selected} records selected." + + ( + f" {num_prepared} records prepared for insertion." + if num_prepared > 0 + else "" + ) + ) + + if insert_records: + self._process_insert_records(insert_records, selected_records) + + if not error_message: + self.select_results.extend(selected_records) + + # Update job result based on selection outcome + self.job_result = DataOperationJobResult( + status=( + DataOperationStatus.SUCCESS + if len(self.select_results) + else DataOperationStatus.JOB_FAILURE + ), + job_errors=[error_message] if error_message else [], + records_processed=len(self.select_results), + total_row_errors=0, + ) + + def _process_insert_records(self, insert_records, selected_records): + """Processes and inserts records if necessary.""" + insert_fields, _ = split_and_filter_fields(fields=self.fields) + insert_step = BulkApiDmlOperation( + sobject=self.sobject, + operation=DataOperationType.INSERT, + api_options=self.api_options, + context=self.context, + fields=insert_fields, + ) + insert_step.start() + insert_step.load_records(insert_records) + insert_step.end() + # Retrieve insert results + insert_results = [] + for batch_id in insert_step.batch_ids: + try: + results_url = f"{insert_step.bulk.endpoint}/job/{insert_step.job_id}/batch/{batch_id}/result" + # Download entire result file to a temporary file first + # to avoid the server dropping connections + with download_file(results_url, insert_step.bulk) as f: + self.logger.info(f"Downloaded results for batch {batch_id}") + reader = csv.reader(f) + next(reader) # Skip header row + for row in reader: + success = process_bool_arg(row[1]) + created = process_bool_arg(row[2]) + insert_results.append( + {"id": row[0], "success": success, "created": created} + ) + except Exception as e: + raise BulkDataException( + f"Failed to download results for batch {batch_id} ({str(e)})" + ) + + insert_index = 0 + for idx, record in enumerate(selected_records): + if record is None: + selected_records[idx] = insert_results[insert_index] + insert_index += 1 + + def _determine_limit_clause(self, total_num_records): + """Determines the LIMIT clause based on the retrieval mode.""" + if ( + self.select_operation_executor.retrieval_mode + == SelectRecordRetrievalMode.ALL + ): + return None + elif ( + self.select_operation_executor.retrieval_mode + == SelectRecordRetrievalMode.MATCH + ): + return total_num_records + + def _execute_select_query(self, select_query: str, query_fields: List[str]): + """Executes the select Bulk API query, retrieves results in JSON, and converts to CSV format if needed.""" + self.batch_id = self.bulk.query(self.job_id, select_query) + self.bulk.wait_for_batch(self.job_id, self.batch_id) + result_ids = self.bulk.get_query_batch_result_ids( + self.batch_id, job_id=self.job_id + ) + select_query_records = [] + + for result_id in result_ids: + # 
Modify URI to request JSON format + uri = f"{self.bulk.endpoint}/job/{self.job_id}/batch/{self.batch_id}/result/{result_id}?format=json" + # Download JSON data + with download_file(uri, self.bulk) as f: + data = json.load(f) + # Get headers from fields, expanding nested structures for TYPEOF results + self.headers = query_fields + + # Convert each record to a flat row + for record in data: + flat_record = flatten_record(record, self.headers) + select_query_records.append(flat_record) + + return select_query_records + def _batch(self, records, n, char_limit=10000000): """Given an iterator of records, yields batches of records serialized in .csv format. @@ -472,6 +656,29 @@ def _serialize_csv_record(self, record): return serialized def get_results(self): + """ + Retrieves and processes the results of a Bulk API operation. + """ + + if self.operation is DataOperationType.QUERY: + yield from self._get_query_results() + else: + yield from self._get_batch_results() + + def _get_query_results(self): + """Handles results for QUERY (select) operations""" + for row in self.select_results: + success = process_bool_arg(row["success"]) + created = process_bool_arg(row["created"]) + yield DataOperationResult( + row["id"] if success else "", + success, + "", + created, + ) + + def _get_batch_results(self): + """Handles results for other DataOperationTypes (insert, update, etc.)""" for batch_id in self.batch_ids: try: results_url = ( @@ -481,29 +688,46 @@ def get_results(self): # to avoid the server dropping connections with download_file(results_url, self.bulk) as f: self.logger.info(f"Downloaded results for batch {batch_id}") + yield from self._parse_batch_results(f) - reader = csv.reader(f) - next(reader) # skip header - - for row in reader: - success = process_bool_arg(row[1]) - created = process_bool_arg(row[2]) - yield DataOperationResult( - row[0] if success else None, - success, - row[3] if not success else None, - created, - ) except Exception as e: raise BulkDataException( f"Failed to download results for batch {batch_id} ({str(e)})" ) + def _parse_batch_results(self, f): + """Parses batch results from the downloaded file""" + reader = csv.reader(f) + next(reader) # Skip header row + + for row in reader: + success = process_bool_arg(row[1]) + created = process_bool_arg(row[2]) + yield DataOperationResult( + row[0] if success else None, + success, + row[3] if not success else None, + created, + ) + class RestApiDmlOperation(BaseDmlOperation): """Operation class for all DML operations run using the REST API.""" - def __init__(self, *, sobject, operation, api_options, context, fields): + def __init__( + self, + *, + sobject, + operation, + api_options, + context, + fields, + selection_strategy=SelectStrategy.STANDARD, + selection_filter=None, + selection_priority_fields=None, + content_type=None, + threshold=None, + ): super().__init__( sobject=sobject, operation=operation, @@ -517,7 +741,9 @@ def __init__(self, *, sobject, operation, api_options, context, fields): field["name"]: field for field in getattr(context.sf, sobject).describe()["fields"] } - self.boolean_fields = [f for f in fields if describe[f]["type"] == "boolean"] + self.boolean_fields = [ + f for f in fields if "." 
not in f and describe[f]["type"] == "boolean" + ] self.api_options = api_options.copy() self.api_options["batch_size"] = ( self.api_options.get("batch_size") or DEFAULT_REST_BATCH_SIZE @@ -526,6 +752,14 @@ def __init__(self, *, sobject, operation, api_options, context, fields): self.api_options["batch_size"], MAX_REST_BATCH_SIZE ) + self.select_operation_executor = SelectOperationExecutor(selection_strategy) + self.selection_filter = selection_filter + self.weights = assign_weights( + priority_fields=selection_priority_fields, fields=fields + ) + self.content_type = content_type + self.threshold = threshold + def _record_to_json(self, rec): result = dict(zip(self.fields, rec)) for boolean_field in self.boolean_fields: @@ -623,14 +857,151 @@ def load_records(self, records): row_errors = len([res for res in self.results if not res["success"]]) self.job_result = DataOperationJobResult( - DataOperationStatus.SUCCESS - if not row_errors - else DataOperationStatus.ROW_FAILURE, + ( + DataOperationStatus.SUCCESS + if not row_errors + else DataOperationStatus.ROW_FAILURE + ), [], len(self.results), row_errors, ) + def select_records(self, records): + """Executes a SOQL query to select records and adds them to results""" + + self.results = [] + query_records = [] + + # Create a copy of the generator using tee + records, records_copy = tee(records) + + # Count total number of records to fetch using the copy + total_num_records = sum(1 for _ in records_copy) + + # Set LIMIT condition + limit_clause = self._determine_limit_clause(total_num_records) + + # Generate the SOQL query based on the selection strategy + ( + select_query, + query_fields, + ) = self.select_operation_executor.select_generate_query( + sobject=self.sobject, + fields=self.fields, + user_filter=self.selection_filter or None, + limit=limit_clause, + offset=None, + ) + + # Execute the query and gather the records + query_records = self._execute_soql_query(select_query, query_fields) + + # Post-process the query results for this batch + ( + selected_records, + insert_records, + error_message, + ) = self.select_operation_executor.select_post_process( + load_records=records, + query_records=query_records, + fields=self.fields, + num_records=total_num_records, + sobject=self.sobject, + weights=self.weights, + threshold=self.threshold, + ) + + # Log the number of selected and prepared for insertion records + num_selected = sum(1 for record in selected_records if record) + num_prepared = len(insert_records) if insert_records else 0 + + self.logger.info( + f"{num_selected} records selected." + + ( + f" {num_prepared} records prepared for insertion." 
+ if num_prepared > 0 + else "" + ) + ) + + if insert_records: + self._process_insert_records(insert_records, selected_records) + + if not error_message: + # Add selected records from this batch to the overall results + self.results.extend(selected_records) + + # Update the job result based on the overall selection outcome + self._update_job_result(error_message) + + def _determine_limit_clause(self, total_num_records): + """Determines the LIMIT clause based on the retrieval mode.""" + if ( + self.select_operation_executor.retrieval_mode + == SelectRecordRetrievalMode.ALL + ): + return None + elif ( + self.select_operation_executor.retrieval_mode + == SelectRecordRetrievalMode.MATCH + ): + return total_num_records + + def _execute_soql_query(self, select_query, query_fields): + """Executes the SOQL query and returns the flattened records.""" + query_records = [] + response = self.sf.restful( + requests.utils.requote_uri(f"query/?q={select_query}"), method="GET" + ) + query_records.extend(self._flatten_response_records(response, query_fields)) + + while not response["done"]: + response = self.sf.query_more( + response["nextRecordsUrl"], identifier_is_url=True + ) + query_records.extend(self._flatten_response_records(response, query_fields)) + + return query_records + + def _flatten_response_records(self, response, query_fields): + """Flattens the response records and returns them as a list.""" + return [flatten_record(record, query_fields) for record in response["records"]] + + def _process_insert_records(self, insert_records, selected_records): + """Processes and inserts records if necessary.""" + insert_fields, _ = split_and_filter_fields(fields=self.fields) + insert_step = RestApiDmlOperation( + sobject=self.sobject, + operation=DataOperationType.INSERT, + api_options=self.api_options, + context=self.context, + fields=insert_fields, + ) + insert_step.start() + insert_step.load_records(insert_records) + insert_step.end() + insert_results = insert_step.results + + insert_index = 0 + for idx, record in enumerate(selected_records): + if record is None: + selected_records[idx] = insert_results[insert_index] + insert_index += 1 + + def _update_job_result(self, error_message): + """Updates the job result based on the selection outcome.""" + self.job_result = DataOperationJobResult( + status=( + DataOperationStatus.SUCCESS + if len(self.results) + else DataOperationStatus.JOB_FAILURE + ), + job_errors=[error_message] if error_message else [], + records_processed=len(self.results), + total_row_errors=0, + ) + def get_results(self): """Return a generator of DataOperationResult objects.""" @@ -712,6 +1083,11 @@ def get_dml_operation( context: Any, volume: int, api: Optional[DataApi] = DataApi.SMART, + selection_strategy: SelectStrategy = SelectStrategy.STANDARD, + selection_filter: Union[str, None] = None, + selection_priority_fields: Union[dict, None] = None, + content_type: Union[str, None] = None, + threshold: Union[float, None] = None, ) -> BaseDmlOperation: """Create an appropriate DmlOperation instance for the given parameters, selecting between REST and Bulk APIs based upon volume (Bulk used at volumes over 2000 records, @@ -745,4 +1121,96 @@ def get_dml_operation( api_options=api_options, context=context, fields=fields, + selection_strategy=selection_strategy, + selection_filter=selection_filter, + selection_priority_fields=selection_priority_fields, + content_type=content_type, + threshold=threshold, ) + + +def extract_flattened_headers(query_fields): + """Extract headers from query 
fields, including handling of TYPEOF fields.""" + headers = [] + + for field in query_fields: + if isinstance(field, dict): + # Handle TYPEOF / polymorphic fields + for lookup, references in field.items(): + # Assuming each reference is a list of dictionaries + for ref_type in references: + for ref_obj, ref_fields in ref_type.items(): + for nested_field in ref_fields: + headers.append( + f"{lookup}.{ref_obj}.{nested_field}" + ) # Flatten the structure + else: + # Regular fields + headers.append(field) + + return headers + + +def flatten_record(record, headers): + """Flatten each record to match headers, handling nested fields.""" + flat_record = [] + + for field in headers: + components = field.split(".") + value = "" + + # Handle lookup fields with two or three components + if len(components) >= 2: + lookup_field = components[0] + lookup = record.get(lookup_field, None) + + # Check if lookup field exists in the record + if lookup is None: + value = "" + else: + if len(components) == 2: + # Handle fields with two components: {lookup}.{ref_field} + ref_field = components[1] + value = lookup.get(ref_field, "") + elif len(components) == 3: + # Handle fields with three components: {lookup}.{ref_obj}.{ref_field} + ref_obj, ref_field = components[1], components[2] + # Check if the type matches the specified ref_obj + if lookup.get("attributes", {}).get("type") == ref_obj: + value = lookup.get(ref_field, "") + else: + value = "" + + else: + # Regular fields or non-polymorphic fields + value = record.get(field, "") + + # Set None values to empty string + if value is None: + value = "" + elif not isinstance(value, str): + value = str(value) + + # Append the resolved value to the flattened record + flat_record.append(value) + + return flat_record + + +def assign_weights( + priority_fields: Union[Dict[str, str], None], fields: List[str] +) -> list: + # If priority_fields is None or an empty dictionary, set all weights to 1 + if not priority_fields: + return [1] * len(fields) + + # Initialize the weight list with LOW_PRIORITY_VALUE + weights = [LOW_PRIORITY_VALUE] * len(fields) + + # Iterate over the fields and assign weights based on priority_fields + for i, field in enumerate(fields): + if field in priority_fields: + # Set weight to HIGH_PRIORITY_VALUE if field is in priority_fields + weights[i] = HIGH_PRIORITY_VALUE + + return weights diff --git a/cumulusci/tasks/bulkdata/tests/mapping_after.yml b/cumulusci/tasks/bulkdata/tests/mapping_after.yml index 2d3ebdb726..9c8798cf0e 100644 --- a/cumulusci/tasks/bulkdata/tests/mapping_after.yml +++ b/cumulusci/tasks/bulkdata/tests/mapping_after.yml @@ -4,6 +4,7 @@ Insert Accounts: table: accounts fields: Id: sf_id + Name: Name lookups: ParentId: after: Insert Accounts @@ -17,16 +18,21 @@ Insert Contacts: table: contacts fields: Id: sf_id + LastName: LastName lookups: ReportsToId: after: Insert Contacts table: contacts + Insert Opportunities: api: bulk sf_object: Opportunity table: opportunities fields: Id: sf_id + CloseDate: CloseDate + StageName: StageName + Name: Name lookups: AccountId: table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select.yml b/cumulusci/tasks/bulkdata/tests/mapping_select.yml new file mode 100644 index 0000000000..e549d7a474 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select.yml @@ -0,0 +1,20 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: similarity + filter: WHEN Name in ('Sample Account') + 
priority_fields: + Name: name + AccountNumber: account_number + fields: + Name: name + AccountNumber: account_number + Description: description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_strategy.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_strategy.yml new file mode 100644 index 0000000000..6ab196fda6 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_strategy.yml @@ -0,0 +1,20 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: invalid_strategy + filter: WHEN Name in ('Sample Account') + priority_fields: + Name: name + AccountNumber: account_number + fields: + Name: name + AccountNumber: account_number + Description: description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_number.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_number.yml new file mode 100644 index 0000000000..1bad614b1d --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_number.yml @@ -0,0 +1,21 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: similarity + filter: WHEN Name in ('Sample Account') + priority_fields: + Name: name + AccountNumber: account_number + threshold: 1.5 + fields: + Name: name + AccountNumber: account_number + Description: description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_strategy.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_strategy.yml new file mode 100644 index 0000000000..71958848c5 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_strategy.yml @@ -0,0 +1,21 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: standard + filter: WHEN Name in ('Sample Account') + priority_fields: + Name: name + AccountNumber: account_number + threshold: 0.5 + fields: + Name: name + AccountNumber: account_number + Description: description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__non_float.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__non_float.yml new file mode 100644 index 0000000000..2ff1482f3d --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__non_float.yml @@ -0,0 +1,21 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: similarity + filter: WHEN Name in ('Sample Account') + priority_fields: + Name: name + AccountNumber: account_number + threshold: invalid threshold + fields: + Name: name + AccountNumber: account_number + Description: description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_missing_priority_fields.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_missing_priority_fields.yml new file mode 100644 index 0000000000..34011945ad --- /dev/null +++ 
b/cumulusci/tasks/bulkdata/tests/mapping_select_missing_priority_fields.yml
@@ -0,0 +1,22 @@
+# Select Mapping File for load
+Select Accounts:
+    api: bulk
+    action: select
+    sf_object: Account
+    table: accounts
+    select_options:
+        strategy: similarity
+        filter: WHEN Name in ('Sample Account')
+        priority_fields:
+            - Name
+            - AccountNumber
+            - ParentId
+            - Email
+    fields:
+        - Name
+        - AccountNumber
+        - Description
+    lookups:
+        ParentId:
+            key_field: parent_id
+            table: accounts
diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_no_priority_fields.yml new file mode 100644 index 0000000000..1559848b48
--- /dev/null
+++ b/cumulusci/tasks/bulkdata/tests/mapping_select_no_priority_fields.yml
@@ -0,0 +1,18 @@
+# Select Mapping File for load
+Select Accounts:
+    api: bulk
+    action: select
+    sf_object: Account
+    table: accounts
+    select_options:
+        strategy: similarity
+        filter: WHEN Name in ('Sample Account')
+        priority_fields:
+    fields:
+        - Name
+        - AccountNumber
+        - Description
+    lookups:
+        ParentId:
+            key_field: parent_id
+            table: accounts
diff --git a/cumulusci/tasks/bulkdata/tests/mapping_v1.yml index 0d04fbb90d..6fe35ef1d6 100644
--- a/cumulusci/tasks/bulkdata/tests/mapping_v1.yml
+++ b/cumulusci/tasks/bulkdata/tests/mapping_v1.yml
@@ -4,6 +4,7 @@ Insert Households:
     table: households
     fields:
         Id: sf_id
+        Name: Name
     static:
         Name: TestHousehold
    record_type: HH_Account
diff --git a/cumulusci/tasks/bulkdata/tests/person_accounts_minimal.yml index 49bd555a7e..c8520397e0 100644
--- a/cumulusci/tasks/bulkdata/tests/person_accounts_minimal.yml
+++ b/cumulusci/tasks/bulkdata/tests/person_accounts_minimal.yml
@@ -8,6 +8,7 @@ Insert PersonContact:
     table: PersonContact
     fields:
         - IsPersonAccount
+        - LastName
     lookups:
         AccountId:
             table: Account
diff --git a/cumulusci/tasks/bulkdata/tests/test_extract.py index e2fd9d65c7..996584a2a5 100644
--- a/cumulusci/tasks/bulkdata/tests/test_extract.py
+++ b/cumulusci/tasks/bulkdata/tests/test_extract.py
@@ -115,7 +115,7 @@ def test_run__person_accounts_disabled(self, query_op_mock):
             sobject="Account",
             api_options={},
             context=task,
-            query="SELECT Id FROM Account",
+            query="SELECT Id, Name FROM Account",
         )
         mock_query_contacts = MockBulkQueryOperation(
             sobject="Contact",
             api_options={},
             context=task,
             query="SELECT Id, FirstName, LastName, Email, AccountId FROM Contact",
         )
-        mock_query_households.results = [["1"]]
+        mock_query_households.results = [["1", "None"]]
         mock_query_contacts.results = [
             ["2", "First", "Last", "test@example.com", "1"]
         ]
@@ -170,7 +170,7 @@ def test_run__person_accounts_enabled(self, query_op_mock):
             sobject="Account",
             api_options={},
             context=task,
-            query="SELECT Id, IsPersonAccount FROM Account",
+            query="SELECT Id, Name, IsPersonAccount FROM Account",
         )
         mock_query_contacts = MockBulkQueryOperation(
             sobject="Contact",
             api_options={},
             context=task,
             query="SELECT Id, FirstName, LastName, Email, IsPersonAccount, AccountId FROM Contact",
         )
-        mock_query_households.results = [["1", "false"]]
+        mock_query_households.results = [["1", "None", "false"]]
         mock_query_contacts.results = [
             ["2", "First", "Last", "test@example.com", "true", "1"]
         ]
diff --git a/cumulusci/tasks/bulkdata/tests/test_load.py
b/cumulusci/tasks/bulkdata/tests/test_load.py index 3277ea3237..8fb8ee0756 100644 --- a/cumulusci/tasks/bulkdata/tests/test_load.py +++ b/cumulusci/tasks/bulkdata/tests/test_load.py @@ -122,9 +122,8 @@ def test_run(self, dml_mock): mock_describe_calls() task() - assert step.records == [ - ["TestHousehold", "1"], + ["TestHousehold", "TestHousehold", "1"], ["Test", "User", "test@example.com", "001000000000000"], ["Error", "User", "error@example.com", "001000000000000"], ] @@ -387,9 +386,8 @@ def test_run__sql(self, dml_mock): ] mock_describe_calls() task() - assert step.records == [ - ["TestHousehold", "1"], + [None, "TestHousehold", "1"], ["Test☃", "User", "test@example.com", "001000000000000"], ["Error", "User", "error@example.com", "001000000000000"], ] @@ -808,6 +806,111 @@ def test_stream_queried_data__skips_empty_rows(self): ["001000000006", "001000000008"], ] == records + def test_process_lookup_fields_polymorphic(self): + task = _make_task( + LoadData, + { + "options": { + "sql_path": Path(__file__).parent + / "test_query_db_joins_lookups.sql", + "mapping": Path(__file__).parent + / "test_query_db_joins_lookups_select.yml", + } + }, + ) + polymorphic_fields = { + "WhoId": { + "name": "WhoId", + "referenceTo": ["Contact", "Lead"], + "relationshipName": "Who", + }, + "WhatId": { + "name": "WhatId", + "referenceTo": ["Account"], + "relationshipName": "What", + }, + } + + expected_fields = [ + "Subject", + "Who.Contact.FirstName", + "Who.Contact.LastName", + "Who.Lead.LastName", + "WhoId", + ] + expected_priority_fields_keys = { + "Who.Contact.FirstName", + "Who.Contact.LastName", + "Who.Lead.LastName", + } + with mock.patch( + "cumulusci.tasks.bulkdata.load.validate_and_inject_mapping" + ), mock.patch.object(task, "sf", create=True): + task._init_mapping() + with task._init_db(): + task._old_format = mock.Mock(return_value=False) + mapping = task.mapping["Select Event"] + fields = mapping.get_load_field_list() + task.process_lookup_fields( + mapping=mapping, fields=fields, polymorphic_fields=polymorphic_fields + ) + assert fields == expected_fields + assert ( + set(mapping.select_options.priority_fields.keys()) + == expected_priority_fields_keys + ) + + def test_process_lookup_fields_non_polymorphic(self): + task = _make_task( + LoadData, + { + "options": { + "sql_path": Path(__file__).parent + / "test_query_db_joins_lookups.sql", + "mapping": Path(__file__).parent + / "test_query_db_joins_lookups_select.yml", + } + }, + ) + non_polymorphic_fields = { + "AccountId": { + "name": "AccountId", + "referenceTo": ["Account"], + "relationshipName": "Account", + } + } + + expected_fields = [ + "FirstName", + "LastName", + "Account.Name", + "Account.AccountNumber", + "AccountId", + ] + expected_priority_fields_keys = { + "FirstName", + "Account.Name", + "Account.AccountNumber", + } + with mock.patch( + "cumulusci.tasks.bulkdata.load.validate_and_inject_mapping" + ), mock.patch.object(task, "sf", create=True): + task._init_mapping() + with task._init_db(): + task._old_format = mock.Mock(return_value=False) + mapping = task.mapping["Select Contact"] + fields = mapping.get_load_field_list() + task.process_lookup_fields( + mapping=mapping, + fields=fields, + polymorphic_fields=non_polymorphic_fields, + ) + assert fields == expected_fields + assert ( + set(mapping.select_options.priority_fields.keys()) + == expected_priority_fields_keys + ) + @responses.activate def test_stream_queried_data__adjusts_relative_dates(self): mock_describe_calls() @@ -871,20 +974,31 @@ def 
test_get_statics_record_type_not_matched(self): assert "RecordType" in str(e.value) def test_query_db__joins_self_lookups(self): + """SQL file in Old Format""" _validate_query_for_mapping_step( sql_path=Path(__file__).parent / "test_query_db__joins_self_lookups.sql", mapping=Path(__file__).parent / "test_query_db__joins_self_lookups.yml", mapping_step_name="Update Accounts", - expected="""SELECT accounts.id AS accounts_id, accounts."Name" AS "accounts_Name", cumulusci_id_table_1.sf_id AS cumulusci_id_table_1_sf_id FROM accounts LEFT OUTER JOIN cumulusci_id_table AS cumulusci_id_table_1 ON cumulusci_id_table_1.id = ? || accounts.parent_id ORDER BY accounts.parent_id""", + expected="""SELECT accounts.id AS accounts_id, accounts."Name" AS "accounts_Name", cumulusci_id_table_1.sf_id AS cumulusci_id_table_1_sf_id FROM accounts LEFT OUTER JOIN cumulusci_id_table AS cumulusci_id_table_1 ON cumulusci_id_table_1.id = ? || cast(accounts.parent_id as varchar) ORDER BY accounts.parent_id""", old_format=True, ) + def test_query_db__joins_select_lookups(self): + """SQL File in New Format (Select)""" + _validate_query_for_mapping_step( + sql_path=Path(__file__).parent / "test_query_db_joins_lookups.sql", + mapping=Path(__file__).parent / "test_query_db_joins_lookups_select.yml", + mapping_step_name="Select Event", + expected='''SELECT events.id AS events_id, events."subject" AS "events_subject", "whoid_contacts_alias"."firstname" AS "whoid_contacts_alias_firstname", "whoid_contacts_alias"."lastname" AS "whoid_contacts_alias_lastname", "whoid_leads_alias"."lastname" AS "whoid_leads_alias_lastname", cumulusci_id_table_1.sf_id AS cumulusci_id_table_1_sf_id FROM events LEFT OUTER JOIN contacts AS "whoid_contacts_alias" ON "whoid_contacts_alias".id=events."whoid" LEFT OUTER JOIN leads AS "whoid_leads_alias" ON "whoid_leads_alias".id=events."whoid" LEFT OUTER JOIN cumulusci_id_table AS cumulusci_id_table_1 ON cumulusci_id_table_1.id=? || cast(events."whoid" as varchar) ORDER BY events."whoid"''', + ) + def test_query_db__joins_polymorphic_lookups(self): + """SQL File in New Format (Polymorphic)""" _validate_query_for_mapping_step( sql_path=Path(__file__).parent / "test_query_db_joins_lookups.sql", mapping=Path(__file__).parent / "test_query_db_joins_lookups.yml", mapping_step_name="Update Event", - expected="""SELECT events.id AS events_id, events."Subject" AS "events_Subject", cumulusci_id_table_1.sf_id AS cumulusci_id_table_1_sf_id FROM events LEFT OUTER JOIN cumulusci_id_table AS cumulusci_id_table_1 ON cumulusci_id_table_1.id = ? || events."WhoId" ORDER BY events."WhoId" """, + expected="""SELECT events.id AS events_id, events."Subject" AS "events_Subject", cumulusci_id_table_1.sf_id AS cumulusci_id_table_1_sf_id FROM events LEFT OUTER JOIN cumulusci_id_table AS cumulusci_id_table_1 ON cumulusci_id_table_1.id = ? 
|| cast(events."WhoId" as varchar) ORDER BY events."WhoId" """, ) @responses.activate diff --git a/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py b/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py index 432c2a0e50..8ce38ff5a8 100644 --- a/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py +++ b/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py @@ -17,6 +17,7 @@ parse_from_yaml, validate_and_inject_mapping, ) +from cumulusci.tasks.bulkdata.select_utils import SelectStrategy from cumulusci.tasks.bulkdata.step import DataApi, DataOperationType from cumulusci.tests.util import DummyOrgConfig, mock_describe_calls @@ -213,6 +214,70 @@ def test_get_relative_date_e2e(self): date.today(), ) + def test_select_options__success(self): + base_path = Path(__file__).parent / "mapping_select.yml" + result = parse_from_yaml(base_path) + + step = result["Select Accounts"] + select_options = step.select_options + assert select_options + assert select_options.strategy == SelectStrategy.SIMILARITY + assert select_options.filter == "WHEN Name in ('Sample Account')" + assert select_options.priority_fields + + def test_select_options__invalid_strategy(self): + base_path = Path(__file__).parent / "mapping_select_invalid_strategy.yml" + with pytest.raises(ValueError) as e: + parse_from_yaml(base_path) + assert "Invalid strategy value: invalid_strategy" in str(e.value) + + def test_select_options__invalid_threshold__non_float(self): + base_path = ( + Path(__file__).parent / "mapping_select_invalid_threshold__non_float.yml" + ) + with pytest.raises(ValueError) as e: + parse_from_yaml(base_path) + assert "value is not a valid float" in str(e.value) + + def test_select_options__invalid_threshold__invalid_strategy(self): + base_path = ( + Path(__file__).parent + / "mapping_select_invalid_threshold__invalid_strategy.yml" + ) + with pytest.raises(ValueError) as e: + parse_from_yaml(base_path) + assert ( + "If a threshold is specified, the strategy must be set to 'similarity'." 
+            in str(e.value)
+        )
+
+    def test_select_options__invalid_threshold__invalid_number(self):
+        base_path = (
+            Path(__file__).parent
+            / "mapping_select_invalid_threshold__invalid_number.yml"
+        )
+        with pytest.raises(ValueError) as e:
+            parse_from_yaml(base_path)
+        assert "Threshold must be between 0 and 1, got 1.5" in str(e.value)
+
+    def test_select_options__missing_priority_fields(self):
+        base_path = Path(__file__).parent / "mapping_select_missing_priority_fields.yml"
+        with pytest.raises(ValueError) as e:
+            parse_from_yaml(base_path)
+        assert (
+            "Priority fields {'Email'} are not present in 'fields' or 'lookups'"
+            in str(e.value)
+        )
+
+    def test_select_options__no_priority_fields(self):
+        base_path = Path(__file__).parent / "mapping_select_no_priority_fields.yml"
+        result = parse_from_yaml(base_path)
+
+        step = result["Select Accounts"]
+        select_options = step.select_options
+        assert select_options.priority_fields == {}
+
     # Start of FLS/Namespace Injection Unit Tests

     def test_is_injectable(self):
@@ -323,6 +388,64 @@ def test_validate_field_dict__injection(self):

         assert ms.fields_ == {"Id": "Id", "Name": "Name", "npsp__Test__c": "Test__c"}

+    def test_validate_fields_required(self):
+        ms = MappingStep(
+            sf_object="Account",
+            fields=["Id", "Name", "Test__c"],
+            action=DataOperationType.INSERT,
+        )
+        fields_describe = CaseInsensitiveDict(
+            {
+                "Name": {
+                    "createable": True,
+                    "nillable": False,
+                    "defaultedOnCreate": False,
+                    "defaultValue": None,
+                },
+                "npsp__Test__c": {
+                    "createable": True,
+                    "nillable": False,
+                    "defaultedOnCreate": False,
+                    "defaultValue": None,
+                },
+            }
+        )
+        ms._validate_field_dict(
+            describe=fields_describe,
+            field_dict=ms.fields_,
+            inject=lambda field: f"npsp__{field}",
+            strip=None,
+            drop_missing=False,
+            data_operation_type=DataOperationType.INSERT,
+        )
+        assert ms.fields_ == {"Id": "Id", "Name": "Name", "npsp__Test__c": "Test__c"}
+        assert ms.check_required(fields_describe)
+
+    def test_validate_fields_required_missing(self):
+        ms = MappingStep(
+            sf_object="Account",
+            fields=["Test__c"],
+            action=DataOperationType.INSERT,
+        )
+        fields_describe = CaseInsensitiveDict(
+            {
+                "Name": {
+                    "createable": True,
+                    "nillable": False,
+                    "defaultedOnCreate": False,
+                    "defaultValue": None,
+                },
+                "Test__c": {
+                    "createable": True,
+                    "nillable": False,
+                    "defaultedOnCreate": False,
+                    "defaultValue": None,
+                },
+            }
+        )
+        assert ms.fields_ == {"Test__c": "Test__c"}
+        assert not ms.check_required(fields_describe)
+
     def test_validate_field_dict__injection_duplicate_fields(self):
         ms = MappingStep(
             sf_object="Account",
@@ -930,7 +1053,7 @@ def test_validate_and_inject_mapping_removes_lookups_with_drop_missing(self):
             StringIO(
                 (
                     "Insert Accounts:\n sf_object: NotAccount\n table: Account\n fields:\n - Nonsense__c\n"
-                    "Insert Contacts:\n sf_object: Contact\n table: Contact\n lookups:\n AccountId:\n table: Account"
+                    "Insert Contacts:\n sf_object: Contact\n table: Contact\n fields:\n - LastName\n lookups:\n AccountId:\n table: Account"
                 )
             )
         )
@@ -1027,7 +1150,7 @@ def test_validate_and_inject_mapping_throws_exception_required_lookup_dropped(se
             StringIO(
                 (
                     "Insert Accounts:\n sf_object: NotAccount\n table: Account\n fields:\n - Nonsense__c\n"
-                    "Insert Contacts:\n sf_object: Contact\n table: Contact\n lookups:\n Id:\n table: Account"
+                    "Insert Contacts:\n sf_object: Contact\n table: Contact\n fields:\n - LastName\n lookups:\n Id:\n table: Account"
                 )
             )
         )
@@ -1045,6 +1168,40 @@ def
test_validate_and_inject_mapping_throws_exception_required_lookup_dropped(se drop_missing=True, ) + @responses.activate + def test_validate_and_inject_mapping_throws_exception_required_fields_missing( + self, caplog + ): + caplog.set_level(logging.ERROR) + mock_describe_calls() + mapping = parse_from_yaml( + StringIO( + ( + "Insert Accounts:\n sf_object: Account\n table: Account\n fields:\n - ns__Description__c\n" + ) + ) + ) + org_config = DummyOrgConfig( + {"instance_url": "https://example.com", "access_token": "abc123"}, "test" + ) + + validate_and_inject_mapping( + mapping=mapping, + sf=org_config.salesforce_client, + namespace="", + data_operation=DataOperationType.INSERT, + inject_namespaces=False, + drop_missing=False, + ) + + expected_error_message = ( + "One or more required fields are missing for loading on Account :{'Name'}" + ) + error_logs = [ + record.message for record in caplog.records if record.levelname == "ERROR" + ] + assert any(expected_error_message in error_log for error_log in error_logs) + @responses.activate def test_validate_and_inject_mapping_injects_namespaces(self): mock_describe_calls() @@ -1206,7 +1363,7 @@ def test_validate_and_inject_mapping_works_case_insensitively(self): StringIO( ( "Insert Accounts:\n sf_object: account\n table: account\n fields:\n - name\n" - "Insert Contacts:\n sf_object: contact\n table: contact\n fields:\n - fIRSTnAME\n lookups:\n accountid:\n table: account" + "Insert Contacts:\n sf_object: contact\n table: contact\n fields:\n - LaSTnAME\n lookups:\n accountid:\n table: account" ) ) ) diff --git a/cumulusci/tasks/bulkdata/tests/test_query_db__joins_self_lookups.sql b/cumulusci/tasks/bulkdata/tests/test_query_db__joins_self_lookups.sql index 818bece666..39b988a9a4 100644 --- a/cumulusci/tasks/bulkdata/tests/test_query_db__joins_self_lookups.sql +++ b/cumulusci/tasks/bulkdata/tests/test_query_db__joins_self_lookups.sql @@ -1,6 +1,6 @@ BEGIN TRANSACTION; CREATE TABLE "accounts" ( - id VARCHAR(255) NOT NULL, + id INTEGER NOT NULL, "Name" VARCHAR(255), "parent_id" VARCHAR(255), PRIMARY KEY (id) diff --git a/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups.sql b/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups.sql index 113e5cebe5..ed7f0e694a 100644 --- a/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups.sql +++ b/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups.sql @@ -1,13 +1,23 @@ BEGIN TRANSACTION; +CREATE TABLE "accounts" ( + id VARCHAR(255) NOT NULL, + "Name" VARCHAR(255), + "AccountNumber" VARCHAR(255), + PRIMARY KEY (id) +); +INSERT INTO "accounts" VALUES("Account-1",'Bluth Company','123456'); +INSERT INTO "accounts" VALUES("Account-2",'Sampson PLC','567890'); + CREATE TABLE "contacts" ( id VARCHAR(255) NOT NULL, "FirstName" VARCHAR(255), - "LastName" VARCHAR(255), + "LastName" VARCHAR(255), + "AccountId" VARCHAR(255), PRIMARY KEY (id) ); -INSERT INTO "contacts" VALUES("Contact-1",'Alpha','gamma'); -INSERT INTO "contacts" VALUES("Contact-2",'Temp','Bluth'); +INSERT INTO "contacts" VALUES("Contact-1",'Alpha','gamma', 'Account-2'); +INSERT INTO "contacts" VALUES("Contact-2",'Temp','Bluth', 'Account-1'); CREATE TABLE "events" ( id VARCHAR(255) NOT NULL, diff --git a/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups_select.yml b/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups_select.yml new file mode 100644 index 0000000000..4b37f491eb --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups_select.yml @@ -0,0 +1,48 @@ +Insert Account: + sf_object: 
Account + table: accounts + api: rest + fields: + - Name + - AccountNumber + +Insert Lead: + sf_object: Lead + table: leads + api: bulk + fields: + - LastName + +Select Contact: + sf_object: Contact + table: contacts + api: bulk + action: select + select_options: + strategy: similarity + priority_fields: + - FirstName + - AccountId + fields: + - FirstName + - LastName + lookups: + AccountId: + table: accounts + +Select Event: + sf_object: Event + table: events + api: rest + action: select + select_options: + strategy: similarity + priority_fields: + - WhoId + fields: + - Subject + lookups: + WhoId: + table: + - contacts + - leads diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py new file mode 100644 index 0000000000..a0b5a3fcad --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -0,0 +1,1006 @@ +import pandas as pd +import pytest + +from cumulusci.tasks.bulkdata.select_utils import ( + SelectOperationExecutor, + SelectStrategy, + add_limit_offset_to_user_filter, + annoy_post_process, + calculate_levenshtein_distance, + determine_field_types, + find_closest_record, + levenshtein_distance, + reorder_records, + replace_empty_strings_with_missing, + split_and_filter_fields, + vectorize_records, +) + + +# Test Cases for standard_generate_query +def test_standard_generate_query_with_default_record_declaration(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) + sobject = "Account" # Assuming Account has a declaration in DEFAULT_DECLARATIONS + limit = 5 + offset = 2 + query, fields = select_operator.select_generate_query( + sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset + ) + + assert "WHERE" in query # Ensure WHERE clause is included + assert f"LIMIT {limit}" in query + assert f"OFFSET {offset}" in query + assert fields == ["Id"] + + +def test_standard_generate_query_without_default_record_declaration(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) + sobject = "Contact" # Assuming no declaration for this object + limit = 3 + offset = None + query, fields = select_operator.select_generate_query( + sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset + ) + + assert "WHERE" not in query # No WHERE clause should be present + assert f"LIMIT {limit}" in query + assert "OFFSET" not in query + assert fields == ["Id"] + + +def test_standard_generate_query_with_user_filter(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) + sobject = "Contact" # Assuming no declaration for this object + limit = 3 + offset = None + user_filter = "WHERE Name IN ('Sample Contact')" + query, fields = select_operator.select_generate_query( + sobject=sobject, fields=[], user_filter=user_filter, limit=limit, offset=offset + ) + + assert "WHERE" in query + assert "Sample Contact" in query + assert "LIMIT" in query + assert "OFFSET" not in query + assert fields == ["Id"] + + +# Test Cases for random generate query +def test_random_generate_query_with_default_record_declaration(): + select_operator = SelectOperationExecutor(SelectStrategy.RANDOM) + sobject = "Account" # Assuming Account has a declaration in DEFAULT_DECLARATIONS + limit = 5 + offset = 2 + query, fields = select_operator.select_generate_query( + sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset + ) + + assert "WHERE" in query # Ensure WHERE clause is included + assert f"LIMIT {limit}" in query + assert f"OFFSET {offset}" in query + assert fields == 
["Id"] + + +def test_random_generate_query_without_default_record_declaration(): + select_operator = SelectOperationExecutor(SelectStrategy.RANDOM) + sobject = "Contact" # Assuming no declaration for this object + limit = 3 + offset = None + query, fields = select_operator.select_generate_query( + sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset + ) + + assert "WHERE" not in query # No WHERE clause should be present + assert f"LIMIT {limit}" in query + assert "OFFSET" not in query + assert fields == ["Id"] + + +# Test Cases for standard_post_process +def test_standard_post_process_with_records(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) + records = [["001"], ["002"], ["003"]] + num_records = 3 + sobject = "Contact" + selected_records, _, error_message = select_operator.select_post_process( + load_records=None, + query_records=records, + num_records=num_records, + sobject=sobject, + weights=[], + fields=[], + threshold=None, + ) + + assert error_message is None + assert len(selected_records) == num_records + assert all(record["success"] for record in selected_records) + assert all(record["created"] is False for record in selected_records) + assert all(record["id"] in ["001", "002", "003"] for record in selected_records) + + +def test_standard_post_process_with_fewer_records(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) + records = [["001"]] + num_records = 3 + sobject = "Opportunity" + selected_records, _, error_message = select_operator.select_post_process( + load_records=None, + query_records=records, + num_records=num_records, + sobject=sobject, + weights=[], + fields=[], + threshold=None, + ) + + assert error_message is None + assert len(selected_records) == num_records + assert all(record["success"] for record in selected_records) + assert all(record["created"] is False for record in selected_records) + # Check if records are repeated to match num_records + assert selected_records.count({"id": "001", "success": True, "created": False}) == 3 + + +def test_standard_post_process_with_no_records(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) + records = [] + num_records = 2 + sobject = "Lead" + selected_records, _, error_message = select_operator.select_post_process( + load_records=None, + query_records=records, + num_records=num_records, + sobject=sobject, + weights=[], + fields=[], + threshold=None, + ) + + assert selected_records == [] + assert error_message == f"No records found for {sobject} in the target org." 
+
+
+# Test cases for Random Post Process
+def test_random_post_process_with_records():
+    select_operator = SelectOperationExecutor(SelectStrategy.RANDOM)
+    records = [["001"], ["002"], ["003"]]
+    num_records = 3
+    sobject = "Contact"
+    selected_records, _, error_message = select_operator.select_post_process(
+        load_records=None,
+        query_records=records,
+        num_records=num_records,
+        sobject=sobject,
+        weights=[],
+        fields=[],
+        threshold=None,
+    )
+
+    assert error_message is None
+    assert len(selected_records) == num_records
+    assert all(record["success"] for record in selected_records)
+    assert all(record["created"] is False for record in selected_records)
+
+
+def test_random_post_process_with_no_records():
+    select_operator = SelectOperationExecutor(SelectStrategy.RANDOM)
+    records = []
+    num_records = 2
+    sobject = "Lead"
+    selected_records, _, error_message = select_operator.select_post_process(
+        load_records=None,
+        query_records=records,
+        num_records=num_records,
+        sobject=sobject,
+        weights=[],
+        fields=[],
+        threshold=None,
+    )
+
+    assert selected_records == []
+    assert error_message == f"No records found for {sobject} in the target org."
+
+
+# Test Cases for Similarity Generate Query
+def test_similarity_generate_query_with_default_record_declaration():
+    select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY)
+    sobject = "Account"  # Assuming Account has a declaration in DEFAULT_DECLARATIONS
+    limit = 5
+    offset = 2
+    query, fields = select_operator.select_generate_query(
+        sobject, ["Name"], [], limit, offset
+    )
+
+    assert "WHERE" in query  # Ensure WHERE clause is included
+    assert fields == ["Id", "Name"]
+    assert f"LIMIT {limit}" in query
+    assert f"OFFSET {offset}" in query
+
+
+def test_similarity_generate_query_without_default_record_declaration():
+    select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY)
+    sobject = "Contact"  # Assuming no declaration for this object
+    limit = 3
+    offset = None
+    query, fields = select_operator.select_generate_query(
+        sobject, ["Name"], [], limit, offset
+    )
+
+    assert "WHERE" not in query  # No WHERE clause should be present
+    assert fields == ["Id", "Name"]
+    assert f"LIMIT {limit}" in query
+    assert "OFFSET" not in query
+
+
+def test_similarity_generate_query_with_nested_fields():
+    select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY)
+    sobject = "Event"  # Assuming no declaration for this object
+    limit = 3
+    offset = None
+    fields = [
+        "Subject",
+        "Who.Contact.Name",
+        "Who.Contact.Email",
+        "Who.Lead.Name",
+        "Who.Lead.Company",
+    ]
+    query, query_fields = select_operator.select_generate_query(
+        sobject, fields, [], limit, offset
+    )
+
+    assert "WHERE" not in query  # No WHERE clause should be present
+    assert query_fields == [
+        "Id",
+        "Subject",
+        "Who.Contact.Name",
+        "Who.Contact.Email",
+        "Who.Lead.Name",
+        "Who.Lead.Company",
+    ]
+    assert f"LIMIT {limit}" in query
+    assert "TYPEOF Who" in query
+    assert "WHEN Contact" in query
+    assert "WHEN Lead" in query
+    assert "OFFSET" not in query
+
+
+def test_similarity_generate_query_with_user_filter():
+    select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY)
+    sobject = "Contact"  # Assuming no declaration for this object
+    limit = 3
+    offset = None
+    user_filter = "WHERE Name IN ('Sample Contact')"
+    query, fields = select_operator.select_generate_query(
+        sobject=sobject,
+        fields=["Name"],
+        user_filter=user_filter,
+        limit=limit,
+        offset=offset,
+    )
+
+    assert "WHERE" in query
+    assert "Sample Contact" in query
+    assert
"LIMIT" in query + assert "OFFSET" not in query + assert fields == ["Id", "Name"] + + +def test_levenshtein_distance(): + assert levenshtein_distance("kitten", "kitten") == 0 # Identical strings + assert levenshtein_distance("kitten", "sitten") == 1 # One substitution + assert levenshtein_distance("kitten", "kitte") == 1 # One deletion + assert levenshtein_distance("kitten", "sittin") == 2 # Two substitutions + assert levenshtein_distance("kitten", "dog") == 6 # Completely different strings + assert levenshtein_distance("kitten", "") == 6 # One string is empty + assert levenshtein_distance("", "") == 0 # Both strings are empty + assert levenshtein_distance("Kitten", "kitten") == 1 # Case sensitivity + assert levenshtein_distance("kit ten", "kitten") == 1 # Strings with spaces + assert ( + levenshtein_distance("levenshtein", "meilenstein") == 4 + ) # Longer strings with multiple differences + + +def test_find_closest_record_different_weights(): + load_record = ["hello", "world"] + query_records = [ + ["record1", "hello", "word"], # Levenshtein distance = 1 + ["record2", "hullo", "word"], # Levenshtein distance = 1 + ["record3", "hello", "word"], # Levenshtein distance = 1 + ] + weights = [2.0, 0.5] + + # With different weights, the first field will have more impact + closest_record, _ = find_closest_record(load_record, query_records, weights) + assert closest_record == [ + "record1", + "hello", + "word", + ], "The closest record should be 'record1'." + + +def test_find_closest_record_basic(): + load_record = ["hello", "world"] + query_records = [ + ["record1", "hello", "word"], # Levenshtein distance = 1 + ["record2", "hullo", "word"], # Levenshtein distance = 1 + ["record3", "hello", "word"], # Levenshtein distance = 1 + ] + weights = [1.0, 1.0] + + closest_record, _ = find_closest_record(load_record, query_records, weights) + assert closest_record == [ + "record1", + "hello", + "word", + ], "The closest record should be 'record1'." + + +def test_find_closest_record_multiple_matches(): + load_record = ["cat", "dog"] + query_records = [ + ["record1", "bat", "dog"], # Levenshtein distance = 1 + ["record2", "cat", "dog"], # Levenshtein distance = 0 + ["record3", "dog", "cat"], # Levenshtein distance = 3 + ] + weights = [1.0, 1.0] + + closest_record, _ = find_closest_record(load_record, query_records, weights) + assert closest_record == [ + "record2", + "cat", + "dog", + ], "The closest record should be 'record2'." 
+
+
+def test_similarity_post_process_with_records():
+    select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY)
+    num_records = 1
+    sobject = "Contact"
+    load_records = [["Tom Cruise", "62", "Actor"]]
+    query_records = [
+        ["001", "Bob Hanks", "62", "Actor"],
+        ["002", "Tom Cruise", "63", "Actor"],  # Slight difference
+        ["003", "Jennifer Aniston", "30", "Actress"],
+    ]
+
+    weights = [1.0, 1.0, 1.0]  # Adjust weights to match your data structure
+
+    selected_records, _, error_message = select_operator.select_post_process(
+        load_records=load_records,
+        query_records=query_records,
+        num_records=num_records,
+        sobject=sobject,
+        weights=weights,
+        fields=["Name", "Age", "Occupation"],
+        threshold=None,
+    )
+
+    assert error_message is None
+    assert len(selected_records) == num_records
+    assert all(record["success"] for record in selected_records)
+    assert all(record["created"] is False for record in selected_records)
+    assert all(record["id"] in ["002"] for record in selected_records)
+
+
+def test_similarity_post_process_with_no_records():
+    select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY)
+    records = []
+    num_records = 2
+    sobject = "Lead"
+    selected_records, _, error_message = select_operator.select_post_process(
+        load_records=None,
+        query_records=records,
+        num_records=num_records,
+        sobject=sobject,
+        weights=[1, 1, 1],
+        fields=[],
+        threshold=None,
+    )
+
+    assert selected_records == []
+    assert error_message == f"No records found for {sobject} in the target org."
+
+
+def test_calculate_levenshtein_distance_basic():
+    record1 = ["hello", "world"]
+    record2 = ["hullo", "word"]
+    weights = [1.0, 1.0]
+
+    # Expected distance based on simple Levenshtein distances
+    # Levenshtein("hello", "hullo") = 1, Levenshtein("world", "word") = 1
+    expected_distance = (1 / 5 * 1.0 + 1 / 5 * 1.0) / 2  # Averaged over two fields
+
+    result = calculate_levenshtein_distance(record1, record2, weights)
+    assert result == pytest.approx(
+        expected_distance
+    ), "Basic distance calculation failed."
+
+    # Empty fields
+    record1 = ["hello", ""]
+    record2 = ["hullo", ""]
+    weights = [1.0, 1.0]
+
+    # Expected distance based on simple Levenshtein distances
+    # Levenshtein("hello", "hullo") = 1, Levenshtein("", "") = 0
+    expected_distance = (1 / 5 * 1.0 + 0 * 1.0) / 2  # Averaged over two fields
+
+    result = calculate_levenshtein_distance(record1, record2, weights)
+    assert result == pytest.approx(
+        expected_distance
+    ), "Basic distance calculation with empty fields failed."
+
+    # Partial empty fields
+    record1 = ["hello", "world"]
+    record2 = ["hullo", ""]
+    weights = [1.0, 1.0]
+
+    # Expected distance based on simple Levenshtein distances
+    # Levenshtein("hello", "hullo") = 1, Levenshtein("world", "") = 5
+    expected_distance = (
+        1 / 5 * 1.0 + 5 / 5 * 0.05 * 1.0
+    ) / 2  # Averaged over two fields
+
+    result = calculate_levenshtein_distance(record1, record2, weights)
+    assert result == pytest.approx(
+        expected_distance
+    ), "Basic distance calculation with partial empty fields failed."
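The expected_distance arithmetic above (and in the weighted case that follows) all instantiate one formula, reconstructed here as a hedged reference; the helper name and signature are illustrative, not the library's. Each field contributes edit_distance / max(len), damped by 0.05 when exactly one side is empty, times its weight, normalized by the total weight.

from cumulusci.tasks.bulkdata.select_utils import levenshtein_distance


def sketch_normalized_distance(record1, record2, weights, edit_distance):
    total = 0.0
    for a, b, w in zip(record1, record2, weights):
        if not a and not b:
            continue  # both fields empty: identical, contributes 0
        # Normalize the raw edit distance by the longer field's length.
        ratio = edit_distance(a, b) / max(len(a), len(b))
        if bool(a) != bool(b):
            ratio *= 0.05  # soften the penalty when only one side is empty
        total += ratio * w
    return total / sum(weights)


# Reproduces the "partial empty fields" case: (1/5 * 1.0 + 5/5 * 0.05 * 1.0) / 2
d = sketch_normalized_distance(
    ["hello", "world"], ["hullo", ""], [1.0, 1.0], levenshtein_distance
)
assert abs(d - (1 / 5 * 1.0 + 5 / 5 * 0.05 * 1.0) / 2) < 1e-9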
+ + +def test_calculate_levenshtein_distance_weighted(): + record1 = ["cat", "dog"] + record2 = ["bat", "fog"] + weights = [2.0, 0.5] + + # Levenshtein("cat", "bat") = 1, Levenshtein("dog", "fog") = 1 + expected_distance = ( + 1 / 3 * 2.0 + 1 / 3 * 0.5 + ) / 2.5 # Weighted average over two fields + + result = calculate_levenshtein_distance(record1, record2, weights) + assert result == pytest.approx( + expected_distance + ), "Weighted distance calculation failed." + + +def test_calculate_levenshtein_distance_records_length_doesnt_match(): + record1 = ["cat", "dog", "cow"] + record2 = ["bat", "fog"] + weights = [2.0, 0.5] + + with pytest.raises(ValueError) as e: + calculate_levenshtein_distance(record1, record2, weights) + assert "Records must have the same number of fields." in str(e.value) + + +def test_calculate_levenshtein_distance_weights_length_doesnt_match(): + record1 = ["cat", "dog"] + record2 = ["bat", "fog"] + weights = [2.0, 0.5, 3.0] + + with pytest.raises(ValueError) as e: + calculate_levenshtein_distance(record1, record2, weights) + assert "Records must be same size as fields (weights)." in str(e.value) + + +def test_replace_empty_strings_with_missing(): + # Case 1: Normal case with some empty strings + records = [ + ["Alice", "", "New York"], + ["Bob", "Engineer", ""], + ["", "Teacher", "Chicago"], + ] + expected = [ + ["Alice", "missing", "New York"], + ["Bob", "Engineer", "missing"], + ["missing", "Teacher", "Chicago"], + ] + assert replace_empty_strings_with_missing(records) == expected + + # Case 2: No empty strings, so the output should be the same as input + records = [["Alice", "Manager", "New York"], ["Bob", "Engineer", "San Francisco"]] + expected = [["Alice", "Manager", "New York"], ["Bob", "Engineer", "San Francisco"]] + assert replace_empty_strings_with_missing(records) == expected + + # Case 3: List with all empty strings + records = [["", "", ""], ["", "", ""]] + expected = [["missing", "missing", "missing"], ["missing", "missing", "missing"]] + assert replace_empty_strings_with_missing(records) == expected + + # Case 4: Empty list (should return an empty list) + records = [] + expected = [] + assert replace_empty_strings_with_missing(records) == expected + + # Case 5: List with some empty sublists + records = [[], ["Alice", ""], []] + expected = [[], ["Alice", "missing"], []] + assert replace_empty_strings_with_missing(records) == expected + + +def test_all_numeric_columns(): + df = pd.DataFrame({"A": [1, 2, 3], "B": [4.5, 5.5, 6.5]}) + weights = [0.1, 0.2] + expected_output = ( + ["A", "B"], # numerical_features + [], # boolean_features + [], # categorical_features + [0.1, 0.2], # numerical_weights + [], # boolean_weights + [], # categorical_weights + ) + assert determine_field_types(df, weights) == expected_output + + +def test_all_boolean_columns(): + df = pd.DataFrame({"A": ["true", "false", "true"], "B": ["false", "true", "false"]}) + weights = [0.3, 0.4] + expected_output = ( + [], # numerical_features + ["A", "B"], # boolean_features + [], # categorical_features + [], # numerical_weights + [0.3, 0.4], # boolean_weights + [], # categorical_weights + ) + assert determine_field_types(df, weights) == expected_output + + +def test_all_categorical_columns(): + df = pd.DataFrame( + {"A": ["apple", "banana", "cherry"], "B": ["dog", "cat", "mouse"]} + ) + weights = [0.5, 0.6] + expected_output = ( + [], # numerical_features + [], # boolean_features + ["A", "B"], # categorical_features + [], # numerical_weights + [], # boolean_weights + [0.5, 0.6], # 
categorical_weights
+    )
+    assert determine_field_types(df, weights) == expected_output
+
+
+def test_mixed_types():
+    df = pd.DataFrame(
+        {
+            "A": [1, 2, 3],
+            "B": ["true", "false", "true"],
+            "C": ["apple", "banana", "cherry"],
+        }
+    )
+    weights = [0.7, 0.8, 0.9]
+    expected_output = (
+        ["A"],  # numerical_features
+        ["B"],  # boolean_features
+        ["C"],  # categorical_features
+        [0.7],  # numerical_weights
+        [0.8],  # boolean_weights
+        [0.9],  # categorical_weights
+    )
+    assert determine_field_types(df, weights) == expected_output
+
+
+def test_vectorize_records_mixed_numerical_boolean_categorical():
+    # Test data with mixed types: numerical, boolean, and categorical columns
+    db_records = [["1.0", "true", "apple"], ["2.0", "false", "banana"]]
+    query_records = [["1.5", "true", "apple"], ["2.5", "false", "cherry"]]
+    weights = [1.0, 1.0, 1.0]  # Equal weights for all three columns
+    hash_features = 4  # Number of hashing vectorizer features for categorical columns
+
+    final_db_vectors, final_query_vectors = vectorize_records(
+        db_records, query_records, hash_features, weights
+    )
+
+    # Check the shape of the output vectors
+    assert final_db_vectors.shape[0] == len(db_records), "DB vectors row count mismatch"
+    assert final_query_vectors.shape[0] == len(
+        query_records
+    ), "Query vectors row count mismatch"
+
+    # Expected dimensions: numerical (1) + boolean (1) + categorical hashed features (4)
+    expected_feature_count = 2 + hash_features
+    assert (
+        final_db_vectors.shape[1] == expected_feature_count
+    ), "DB vectors column count mismatch"
+    assert (
+        final_query_vectors.shape[1] == expected_feature_count
+    ), "Query vectors column count mismatch"
+
+
+def test_annoy_post_process():
+    # Test data
+    load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
+    query_records = [["q1", "Alice", "Engineer"], ["q2", "Charlie", "Artist"]]
+    weights = [1.0, 1.0, 1.0]  # Example weights
+
+    closest_records, insert_records = annoy_post_process(
+        load_records=load_records,
+        query_records=query_records,
+        similarity_weights=weights,
+        all_fields=["Name", "Occupation"],
+        threshold=None,
+    )
+
+    # Assert the closest records
+    assert (
+        len(closest_records) == 2
+    )  # We expect two results (one for each load record)
+    assert (
+        closest_records[0]["id"] == "q1"
+    )  # The first query record should match the first load record
+
+    # No insert-fallback records expected
+    assert not insert_records
+
+
+def test_annoy_post_process__insert_records():
+    # Test data
+    load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
+    query_records = [["q1", "Alice", "Engineer"], ["q2", "Charlie", "Artist"]]
+    weights = [1.0, 1.0, 1.0]  # Example weights
+    threshold = 0.3
+
+    closest_records, insert_records = annoy_post_process(
+        load_records=load_records,
+        query_records=query_records,
+        similarity_weights=weights,
+        all_fields=["Name", "Occupation"],
+        threshold=threshold,
+    )
+
+    # Assert the closest records
+    assert len(closest_records) == 2  # We expect two results (one record and one None)
+    assert (
+        closest_records[0]["id"] == "q1"
+    )  # The first query record should match the first load record
+    assert closest_records[1] is None  # The second query record should be None
+    assert insert_records[0] == [
+        "Bob",
+        "Doctor",
+    ]  # The first insert record should match the second load record
+
+
+def test_annoy_post_process__no_query_records():
+    # Test data
+    load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
+    query_records = []
+    weights = [1.0, 1.0, 1.0]  # Example weights
+    threshold = 0.3
+
+    closest_records, insert_records =
annoy_post_process(
+        load_records=load_records,
+        query_records=query_records,
+        similarity_weights=weights,
+        all_fields=["Name", "Occupation"],
+        threshold=threshold,
+    )
+
+    # Assert the closest records
+    assert len(closest_records) == 2  # We expect two results (both None)
+    assert all(rec is None for rec in closest_records)  # Both should be None
+    assert insert_records[0] == [
+        "Alice",
+        "Engineer",
+    ]  # The first insert record should match the first load record
+    assert insert_records[1] == [
+        "Bob",
+        "Doctor",
+    ]  # The second insert record should match the second load record
+
+
+def test_annoy_post_process__insert_records_with_polymorphic_fields():
+    # Test data
+    load_records = [
+        ["Alice", "Engineer", "Alice_Contact", "abcd1234"],
+        ["Bob", "Doctor", "Bob_Contact", "qwer1234"],
+    ]
+    query_records = [
+        ["q1", "Alice", "Engineer", "Alice_Contact"],
+        ["q2", "Charlie", "Artist", "Charlie_Contact"],
+    ]
+    weights = [1.0, 1.0, 1.0, 1.0]  # Example weights
+    threshold = 0.3
+    all_fields = ["Name", "Occupation", "Contact.Name", "ContactId"]
+
+    closest_records, insert_records = annoy_post_process(
+        load_records=load_records,
+        query_records=query_records,
+        similarity_weights=weights,
+        all_fields=all_fields,
+        threshold=threshold,
+    )
+
+    # Assert the closest records
+    assert len(closest_records) == 2  # We expect two results (one record and one None)
+    assert (
+        closest_records[0]["id"] == "q1"
+    )  # The first query record should match the first load record
+    assert closest_records[1] is None  # The second query record should be None
+    assert insert_records[0] == [
+        "Bob",
+        "Doctor",
+        "qwer1234",
+    ]  # The first insert record should match the second load record
+
+
+def test_single_record_match_annoy_post_process():
+    # Mock data where only the first query record matches the first load record
+    load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]]
+    query_records = [["q1", "Alice", "Engineer"]]
+    weights = [1.0, 1.0, 1.0]
+
+    closest_records, insert_records = annoy_post_process(
+        load_records=load_records,
+        query_records=query_records,
+        similarity_weights=weights,
+        all_fields=["Name", "Occupation"],
+        threshold=None,
+    )
+
+    # Both the load records should be matched with the only query record we have
+    assert len(closest_records) == 2
+    assert closest_records[0]["id"] == "q1"
+    assert not insert_records
+
+
+@pytest.mark.parametrize(
+    "filter_clause, limit_clause, offset_clause, expected",
+    [
+        # Test: No existing LIMIT/OFFSET and no new clauses
+        ("SELECT * FROM users", None, None, " SELECT * FROM users"),
+        # Test: Existing LIMIT and no new limit provided
+        ("SELECT * FROM users LIMIT 100", None, None, "SELECT * FROM users LIMIT 100"),
+        # Test: Existing OFFSET and no new offset provided
+        ("SELECT * FROM users OFFSET 20", None, None, "SELECT * FROM users OFFSET 20"),
+        # Test: Existing LIMIT/OFFSET and new clauses provided
+        (
+            "SELECT * FROM users LIMIT 100 OFFSET 20",
+            50,
+            10,
+            "SELECT * FROM users LIMIT 50 OFFSET 30",
+        ),
+        # Test: Existing LIMIT, new limit larger than existing (should keep the smaller one)
+        ("SELECT * FROM users LIMIT 100", 150, None, "SELECT * FROM users LIMIT 100"),
+        # Test: New limit smaller than existing (should use the new one)
+        ("SELECT * FROM users LIMIT 100", 50, None, "SELECT * FROM users LIMIT 50"),
+        # Test: Existing OFFSET, adding a new offset (should sum the offsets)
+        ("SELECT * FROM users OFFSET 20", None, 30, "SELECT * FROM users OFFSET 50"),
+        # Test: Existing LIMIT/OFFSET and new values set to None
+        (
+            "SELECT *
FROM users LIMIT 100 OFFSET 20", + None, + None, + "SELECT * FROM users LIMIT 100 OFFSET 20", + ), + # Test: Removing existing LIMIT and adding a new one + ("SELECT * FROM users LIMIT 200", 50, None, "SELECT * FROM users LIMIT 50"), + # Test: Removing existing OFFSET and adding a new one + ("SELECT * FROM users OFFSET 40", None, 20, "SELECT * FROM users OFFSET 60"), + # Edge case: Filter clause with mixed cases + ( + "SELECT * FROM users LiMiT 100 oFfSeT 20", + 50, + 10, + "SELECT * FROM users LIMIT 50 OFFSET 30", + ), + # Test: Filter clause with trailing/leading spaces + ( + " SELECT * FROM users LIMIT 100 OFFSET 20 ", + 50, + 10, + "SELECT * FROM users LIMIT 50 OFFSET 30", + ), + ], +) +def test_add_limit_offset_to_user_filter( + filter_clause, limit_clause, offset_clause, expected +): + result = add_limit_offset_to_user_filter(filter_clause, limit_clause, offset_clause) + assert result.strip() == expected.strip() + + +def test_reorder_records_basic_reordering(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["job", "name"] + + expected = [ + ["Engineer", "Alice"], + ["Designer", "Bob"], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_partial_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["age"] + + expected = [ + [30], + [25], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_missing_fields_in_new_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["nonexistent", "job"] + + expected = [ + ["Engineer"], + ["Designer"], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_empty_records(): + records = [] + original_fields = ["name", "age", "job"] + new_fields = ["job", "name"] + + expected = [] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_empty_new_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = [] + + expected = [ + [], + [], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_empty_original_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = [] + new_fields = ["job", "name"] + + with pytest.raises(KeyError): + reorder_records(records, original_fields, new_fields) + + +def test_reorder_records_no_common_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["nonexistent_field"] + + expected = [ + [], + [], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_duplicate_fields_in_new_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["job", "job", "name"] + + expected = [ + ["Engineer", "Engineer", "Alice"], + ["Designer", "Designer", "Bob"], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def 
test_reorder_records_all_fields_in_order(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["name", "age", "job"] + + expected = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_split_and_filter_fields_basic_case(): + fields = [ + "Account.Name", + "Account.Industry", + "Contact.Name", + "AccountId", + "ContactId", + "CreatedDate", + ] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == ["AccountId", "ContactId", "CreatedDate"] + assert select_fields == [ + "Account.Name", + "Account.Industry", + "Contact.Name", + "CreatedDate", + ] + + +def test_split_and_filter_fields_all_non_lookup_fields(): + fields = ["Name", "CreatedDate"] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == ["Name", "CreatedDate"] + assert select_fields == fields + + +def test_split_and_filter_fields_all_lookup_fields(): + fields = ["Account.Name", "Account.Industry", "Contact.Name"] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == [] + assert select_fields == fields + + +def test_split_and_filter_fields_empty_fields(): + fields = [] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == [] + assert select_fields == [] + + +def test_split_and_filter_fields_single_non_lookup_field(): + fields = ["Id"] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == ["Id"] + assert select_fields == ["Id"] + + +def test_split_and_filter_fields_single_lookup_field(): + fields = ["Account.Name"] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == [] + assert select_fields == ["Account.Name"] + + +def test_split_and_filter_fields_multiple_unique_lookups(): + fields = [ + "Account.Name", + "Account.Industry", + "Contact.Email", + "Contact.Phone", + "Id", + ] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == ["Id"] + assert ( + select_fields == fields + ) # No filtering applied since all components are unique diff --git a/cumulusci/tasks/bulkdata/tests/test_snowfakery.py b/cumulusci/tasks/bulkdata/tests/test_snowfakery.py index de87c5503f..daa0fa2ef4 100644 --- a/cumulusci/tasks/bulkdata/tests/test_snowfakery.py +++ b/cumulusci/tasks/bulkdata/tests/test_snowfakery.py @@ -783,12 +783,11 @@ def test_explicit_channel_declarations(self, mock_load_data, create_task): "recipe": Path(__file__).parent / "snowfakery/simple_snowfakery.recipe.yml", "run_until_recipe_repeated": 15, - "recipe_options": {"xyzzy": "Nothing happens", "some_number": 42}, "loading_rules": Path(__file__).parent / "snowfakery/simple_snowfakery_channels.load.yml", }, ) - with mock.patch.object( + with pytest.warns(UserWarning), mock.patch.object( task.project_config, "keychain", DummyKeychain() ) as keychain: @@ -833,7 +832,6 @@ def test_serial_mode(self, mock_load_data, create_task): "recipe": Path(__file__).parent / "snowfakery/simple_snowfakery.recipe.yml", "run_until_recipe_repeated": 15, - "recipe_options": {"xyzzy": "Nothing happens", "some_number": 42}, "bulk_mode": "Serial", }, ) diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index fc8cea7013..e94e91f226 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -1,5 
+1,6 @@ import io import json +from itertools import tee from unittest import mock import pytest @@ -7,7 +8,10 @@ from cumulusci.core.exceptions import BulkDataException from cumulusci.tasks.bulkdata.load import LoadData +from cumulusci.tasks.bulkdata.select_utils import SelectStrategy from cumulusci.tasks.bulkdata.step import ( + HIGH_PRIORITY_VALUE, + LOW_PRIORITY_VALUE, BulkApiDmlOperation, BulkApiQueryOperation, BulkJobMixin, @@ -18,7 +22,10 @@ DataOperationType, RestApiDmlOperation, RestApiQueryOperation, + assign_weights, download_file, + extract_flattened_headers, + flatten_record, get_dml_operation, get_query_operation, ) @@ -534,242 +541,1894 @@ def test_get_prev_record_values(self): ) step.bulk.get_all_results_for_query_batch.assert_called_once_with("BATCH_ID") - def test_batch(self): + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_standard_strategy_success(self, download_mock): + # Set up mock context and BulkApiDmlOperation context = mock.Mock() - step = BulkApiDmlOperation( sobject="Contact", - operation=DataOperationType.INSERT, - api_options={"batch_size": 2}, + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, context=context, fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + content_type="JSON", ) - records = iter([["Test"], ["Test2"], ["Test3"]]) - results = list(step._batch(records, n=2)) + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] - assert len(results) == 2 - assert list(results[0]) == [ - '"LastName"\r\n'.encode("utf-8"), - '"Test"\r\n'.encode("utf-8"), - '"Test2"\r\n'.encode("utf-8"), - ] - assert list(results[1]) == [ - '"LastName"\r\n'.encode("utf-8"), - '"Test3"\r\n'.encode("utf-8"), - ] + # Mock the downloaded CSV content with a single record + download_mock.return_value = io.StringIO('[{"Id":"003000000000001"}]') - def test_batch__character_limit(self): - context = mock.Mock() + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 3 + ) + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_standard_strategy_failure__no_records(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() step = BulkApiDmlOperation( sobject="Contact", - operation=DataOperationType.INSERT, - api_options={"batch_size": 2}, + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, context=context, fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, ) - records = [["Test"], ["Test2"], ["Test3"]] + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = 
"JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] - csv_rows = [step._serialize_csv_record(step.fields)] - for r in records: - csv_rows.append(step._serialize_csv_record(r)) + # Mock the downloaded CSV content indicating no records found + download_mock.return_value = io.StringIO("[]") - char_limit = sum([len(r) for r in csv_rows]) - 1 + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) - # Ask for batches of three, but we - # should get batches of 2 back - results = list(step._batch(iter(records), n=3, char_limit=char_limit)) + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) - assert len(results) == 2 - assert list(results[0]) == [ - '"LastName"\r\n'.encode("utf-8"), - '"Test"\r\n'.encode("utf-8"), - '"Test2"\r\n'.encode("utf-8"), - ] - assert list(results[1]) == [ - '"LastName"\r\n'.encode("utf-8"), - '"Test3"\r\n'.encode("utf-8"), - ] + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the job result and assert its properties for failure scenario + job_result = step.job_result + assert job_result.status == DataOperationStatus.JOB_FAILURE + assert ( + job_result.job_errors[0] + == "No records found for Contact in the target org." + ) + assert job_result.records_processed == 0 + assert job_result.total_row_errors == 0 @mock.patch("cumulusci.tasks.bulkdata.step.download_file") - def test_get_results(self, download_mock): + def test_select_records_user_selection_filter_success(self, download_mock): + # Set up mock context and BulkApiDmlOperation context = mock.Mock() - context.bulk.endpoint = "https://test" - download_mock.side_effect = [ - io.StringIO( - """id,success,created,error -003000000000001,true,true, -003000000000002,true,true,""" - ), - io.StringIO( - """id,success,created,error -003000000000003,false,false,error""" - ), - ] - step = BulkApiDmlOperation( sobject="Contact", - operation=DataOperationType.INSERT, - api_options={}, + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, context=context, fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter='WHERE LastName in ("Sample Name")', ) - step.job_id = "JOB" - step.batch_ids = ["BATCH1", "BATCH2"] - results = step.get_results() + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] - assert list(results) == [ - DataOperationResult("003000000000001", True, None, True), - DataOperationResult("003000000000002", True, None, True), - DataOperationResult(None, False, "error", False), - ] - download_mock.assert_has_calls( - [ - mock.call("https://test/job/JOB/batch/BATCH1/result", context.bulk), - mock.call("https://test/job/JOB/batch/BATCH2/result", context.bulk), - ] + # Mock the downloaded CSV content with a single record + download_mock.return_value = io.StringIO('[{"Id":"003000000000001"}]') + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + + # Execute the 
select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 3 ) @mock.patch("cumulusci.tasks.bulkdata.step.download_file") - def test_get_results__failure(self, download_mock): + def test_select_records_user_selection_filter_order_success(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter="ORDER BY CreatedDate", + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded JSON content with multiple records + download_mock.return_value = io.StringIO( + '[{"Id":"003000000000003"}, {"Id":"003000000000001"}, {"Id":"003000000000002"}]' + ) + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results are in the order given by user query + assert results[0].id == "003000000000003" + assert results[1].id == "003000000000001" + assert results[2].id == "003000000000002" + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_user_selection_filter_failure(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() step = BulkApiDmlOperation( sobject="Contact", - operation=DataOperationType.INSERT, - api_options={}, + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, context=context, fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter='WHERE LastName in ("Sample Name")', ) - step.job_id = "JOB" - step.batch_ids = ["BATCH1", "BATCH2"] + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the download to fail with a malformed query error + download_mock.side_effect = BulkDataException("MALFORMED QUERY") + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + + # Execute the select_records operation + step.start() with pytest.raises(BulkDataException): - list(step.get_results()) + step.select_records(records) @mock.patch("cumulusci.tasks.bulkdata.step.download_file") - def test_end_to_end(self, 
download_mock): + def test_select_records_similarity_strategy_success(self, download_mock): + # Set up mock context and BulkApiDmlOperation context = mock.Mock() - context.bulk.endpoint = "https://test" - context.bulk.create_job.return_value = "JOB" - context.bulk.post_batch.side_effect = ["BATCH1", "BATCH2"] + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded JSON content with multiple records download_mock.return_value = io.StringIO( - """id,success,created,error -003000000000001,true,true, -003000000000002,true,true, -003000000000003,false,false,error""" + """[{"Id":"003000000000001", "Name":"Jawad", "Email":"mjawadtp@example.com"}, {"Id":"003000000000002", "Name":"Aditya", "Email":"aditya@example.com"}, {"Id":"003000000000003", "Name":"Tom", "Email":"tom@example.com"}]""" + ) + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom", "cruise@example.com"], + ] + ) + + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=False + ) + ) + == 1 ) + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy_failure__no_records( + self, download_mock + ): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() step = BulkApiDmlOperation( sobject="Contact", - operation=DataOperationType.INSERT, - api_options={}, + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, context=context, - fields=["LastName"], + fields=["Id", "Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded JSON content indicating no records found + download_mock.return_value = io.StringIO("[]") + + # Mock the _wait_for_job method to simulate a successful job step._wait_for_job = mock.Mock() step._wait_for_job.return_value = DataOperationJobResult( DataOperationStatus.SUCCESS, [], 0, 0 ) + # Prepare input records + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + 
["Tom", "cruise@example.com"], + ] + ) + + # Execute the select_records operation step.start() - step.load_records(iter([["Test"], ["Test2"], ["Test3"]])) + step.select_records(records) step.end() - assert step.job_result.status is DataOperationStatus.SUCCESS - results = step.get_results() - - assert list(results) == [ - DataOperationResult("003000000000001", True, None, True), - DataOperationResult("003000000000002", True, None, True), - DataOperationResult(None, False, "error", False), - ] - + # Get the job result and assert its properties for failure scenario + job_result = step.job_result + assert job_result.status == DataOperationStatus.JOB_FAILURE + assert ( + job_result.job_errors[0] + == "No records found for Contact in the target org." + ) + assert job_result.records_processed == 0 + assert job_result.total_row_errors == 0 -class TestRestApiQueryOperation: - def test_query(self): + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy_parent_level_records__polymorphic( + self, download_mock + ): + mock_describe_calls() + # Set up mock context and BulkApiDmlOperation context = mock.Mock() - context.sf.query.return_value = { - "totalSize": 2, - "done": True, - "records": [ - { - "Id": "003000000000001", - "LastName": "Narvaez", - "Email": "wayne@example.com", - }, - {"Id": "003000000000002", "LastName": "De Vries", "Email": None}, + step = BulkApiDmlOperation( + sobject="Event", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=[ + "Subject", + "Who.Contact.Name", + "Who.Contact.Email", + "Who.Lead.Name", + "Who.Lead.Company", + "WhoId", ], - } + selection_strategy=SelectStrategy.SIMILARITY, + ) - query_op = RestApiQueryOperation( + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + download_mock.return_value = io.StringIO( + """[ + {"Id": "003000000000001", "Subject": "Sample Event 1", "Who":{ "attributes": {"type": "Contact"}, "Id": "abcd1234", "Name": "Sample Contact", "Email": "contact@example.com"}}, + { "Id": "003000000000002", "Subject": "Sample Event 2", "Who":{ "attributes": {"type": "Lead"}, "Id": "qwer1234", "Name": "Sample Lead", "Company": "Salesforce"}} + ]""" + ) + + records = iter( + [ + [ + "Sample Event 1", + "Sample Contact", + "contact@example.com", + "", + "", + "lkjh1234", + ], + ["Sample Event 2", "", "", "Sample Lead", "Salesforce", "poiu1234"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 2 # Expect 2 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + assert results[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + assert results[1] == DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy_parent_level_records__non_polymorphic( + self, download_mock + ): + mock_describe_calls() + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( sobject="Contact", - fields=["Id", "LastName", "Email"], - api_options={}, + operation=DataOperationType.QUERY, + 
api_options={"batch_size": 10}, context=context, - query="SELECT Id, LastName, Email FROM Contact", + fields=["Name", "Account.Name", "Account.AccountNumber", "AccountId"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + download_mock.return_value = io.StringIO( + """[ + {"Id": "003000000000001", "Name": "Sample Contact 1", "Account":{ "attributes": {"type": "Account"}, "Id": "abcd1234", "Name": "Sample Account", "AccountNumber": 123456}}, + { "Id": "003000000000002", "Subject": "Sample Contact 2", "Account": null} + ]""" + ) + + records = iter( + [ + ["Sample Contact 3", "Sample Account", "123456", "poiu1234"], + ["Sample Contact 4", "", "", ""], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 2 # Expect 2 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + assert results[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + assert results[1] == DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy_priority_fields(self, download_mock): + mock_describe_calls() + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step_1 = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=[ + "Name", + "Email", + "Account.Name", + "Account.AccountNumber", + "AccountId", + ], + selection_strategy=SelectStrategy.SIMILARITY, + selection_priority_fields={"Name": "Name", "Email": "Email"}, + ) + + step_2 = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=[ + "Name", + "Email", + "Account.Name", + "Account.AccountNumber", + "AccountId", + ], + selection_strategy=SelectStrategy.SIMILARITY, + selection_priority_fields={ + "Account.Name": "Account.Name", + "Account.AccountNumber": "Account.AccountNumber", + }, + ) + + # Mock Bulk API responses + step_1.bulk.endpoint = "https://test" + step_1.bulk.create_query_job.return_value = "JOB" + step_1.bulk.query.return_value = "BATCH" + step_1.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + step_2.bulk.endpoint = "https://test" + step_2.bulk.create_query_job.return_value = "JOB" + step_2.bulk.query.return_value = "BATCH" + step_2.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + sample_response = [ + { + "Id": "003000000000001", + "Name": "Bob The Builder", + "Email": "bob@yahoo.org", + "Account": { + "attributes": {"type": "Account"}, + "Id": "abcd1234", + "Name": "Jawad TP", + "AccountNumber": 567890, + }, + }, + { + "Id": "003000000000002", + "Name": "Tom Cruise", + "Email": "tom@exmaple.com", + "Account": { + "attributes": {"type": "Account"}, + "Id": "qwer1234", + "Name": "Aditya B", + "AccountNumber": 123456, + }, + }, + ] + + download_mock.side_effect = [ + io.StringIO(f"""{json.dumps(sample_response)}"""), + io.StringIO(f"""{json.dumps(sample_response)}"""), + ] + + records = iter( + [ + ["Bob The Builder", "bob@yahoo.org", "Aditya B", 
"123456", "poiu1234"], + ] + ) + records_1, records_2 = tee(records) + step_1.start() + step_1.select_records(records_1) + step_1.end() + + step_2.start() + step_2.select_records(records_2) + step_2.end() + + # Get the results and assert their properties + results_1 = list(step_1.get_results()) + results_2 = list(step_2.get_results()) + assert ( + len(results_1) == 1 + ) # Expect 1 results (matching the input records count) + assert ( + len(results_2) == 1 + ) # Expect 1 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + # Prioritizes Name and Email + assert results_1[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + # Prioritizes Account.Name and Account.AccountNumber + assert results_2[0] == DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_process_insert_records_success(self, download_mock): + # Mock context and insert records + context = mock.Mock() + insert_records = iter([["John", "Doe"], ["Jane", "Smith"]]) + selected_records = [None, None] + + # Mock insert fields splitting + insert_fields = ["FirstName", "LastName"] + with mock.patch( + "cumulusci.tasks.bulkdata.step.split_and_filter_fields", + return_value=(insert_fields, None), + ) as split_mock: + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=["FirstName", "LastName"], + ) + + # Mock Bulk API + step.bulk.endpoint = "https://test" + step.bulk.create_insert_job.return_value = "JOB" + step.bulk.get_insert_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with successful results + download_mock.return_value = io.StringIO( + "Id,Success,Created\n0011k00003E8xAaAAI,true,true\n0011k00003E8xAbAAJ,true,true\n" + ) + + # Mock sub-operation for BulkApiDmlOperation + insert_step = mock.Mock(spec=BulkApiDmlOperation) + insert_step.start = mock.Mock() + insert_step.load_records = mock.Mock() + insert_step.end = mock.Mock() + insert_step.batch_ids = ["BATCH1"] + insert_step.bulk = mock.Mock() + insert_step.bulk.endpoint = "https://test" + insert_step.job_id = "JOB" + + with mock.patch( + "cumulusci.tasks.bulkdata.step.BulkApiDmlOperation", + return_value=insert_step, + ): + step._process_insert_records(insert_records, selected_records) + + # Assertions for split fields and sub-operation + split_mock.assert_called_once_with(fields=["FirstName", "LastName"]) + insert_step.start.assert_called_once() + insert_step.load_records.assert_called_once_with(insert_records) + insert_step.end.assert_called_once() + + # Validate the download file interactions + download_mock.assert_called_once_with( + "https://test/job/JOB/batch/BATCH1/result", insert_step.bulk + ) + + # Validate that selected_records is updated with insert results + assert selected_records == [ + {"id": "0011k00003E8xAaAAI", "success": True, "created": True}, + {"id": "0011k00003E8xAbAAJ", "success": True, "created": True}, + ] + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_process_insert_records_failure(self, download_mock): + # Mock context and insert records + context = mock.Mock() + insert_records = iter([["John", "Doe"], ["Jane", "Smith"]]) + selected_records = [None, None] + + # Mock insert fields splitting + insert_fields = ["FirstName", "LastName"] + with mock.patch( + 
"cumulusci.tasks.bulkdata.step.split_and_filter_fields", + return_value=(insert_fields, None), + ): + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=["FirstName", "LastName"], + ) + + # Mock failure during results download + download_mock.side_effect = Exception("Download failed") + + # Mock sub-operation for BulkApiDmlOperation + insert_step = mock.Mock(spec=BulkApiDmlOperation) + insert_step.start = mock.Mock() + insert_step.load_records = mock.Mock() + insert_step.end = mock.Mock() + insert_step.batch_ids = ["BATCH1"] + insert_step.bulk = mock.Mock() + insert_step.bulk.endpoint = "https://test" + insert_step.job_id = "JOB" + + with mock.patch( + "cumulusci.tasks.bulkdata.step.BulkApiDmlOperation", + return_value=insert_step, + ): + with pytest.raises(BulkDataException) as excinfo: + step._process_insert_records(insert_records, selected_records) + + # Validate that the exception is raised with the correct message + assert "Failed to download results for batch BATCH1" in str( + excinfo.value + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy__insert_records(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + # Add step with threshold + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + threshold=0.3, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with a single record + select_results = io.StringIO( + """[{"Id":"003000000000001", "Name":"Jawad", "Email":"mjawadtp@example.com"}]""" + ) + insert_results = io.StringIO( + "Id,Success,Created\n003000000000002,true,true\n003000000000003,true,true\n" + ) + download_mock.side_effect = [select_results, insert_results] + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom", "cruise@example.com"], + ] + ) + + # Mock sub-operation for BulkApiDmlOperation + insert_step = mock.Mock(spec=BulkApiDmlOperation) + insert_step.start = mock.Mock() + insert_step.load_records = mock.Mock() + insert_step.end = mock.Mock() + insert_step.batch_ids = ["BATCH1"] + insert_step.bulk = mock.Mock() + insert_step.bulk.endpoint = "https://test" + insert_step.job_id = "JOB" + + with mock.patch( + "cumulusci.tasks.bulkdata.step.BulkApiDmlOperation", + return_value=insert_step, + ): + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + 
DataOperationResult( + id="003000000000002", success=True, error="", created=True + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=True + ) + ) + == 1 + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy__insert_records__no_select_records( + self, download_mock + ): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + # Add step with threshold + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + threshold=0.3, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded select results (empty) and insert results + select_results = io.StringIO("""[]""") + insert_results = io.StringIO( + "Id,Success,Created\n003000000000001,true,true\n003000000000002,true,true\n003000000000003,true,true\n" + ) + download_mock.side_effect = [select_results, insert_results] + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom", "cruise@example.com"], + ] + ) + + # Mock sub-operation for BulkApiDmlOperation + insert_step = mock.Mock(spec=BulkApiDmlOperation) + insert_step.start = mock.Mock() + insert_step.load_records = mock.Mock() + insert_step.end = mock.Mock() + insert_step.batch_ids = ["BATCH1"] + insert_step.bulk = mock.Mock() + insert_step.bulk.endpoint = "https://test" + insert_step.job_id = "JOB" + + with mock.patch( + "cumulusci.tasks.bulkdata.step.BulkApiDmlOperation", + return_value=insert_step, + ): + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=True + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=True + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=True + ) + ) + == 1 + ) + + def test_batch(self): + context = mock.Mock() + + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.INSERT, + api_options={"batch_size": 2}, + context=context, + fields=["LastName"], + ) + + records = iter([["Test"], ["Test2"], ["Test3"]]) + results = list(step._batch(records, n=2)) + + assert len(results) == 2 + assert list(results[0]) == [ + '"LastName"\r\n'.encode("utf-8"), + '"Test"\r\n'.encode("utf-8"), + '"Test2"\r\n'.encode("utf-8"), + ] + assert list(results[1]) == [ + '"LastName"\r\n'.encode("utf-8"), + '"Test3"\r\n'.encode("utf-8"), + ] + + def test_batch__character_limit(self): + context = mock.Mock() + + step = BulkApiDmlOperation( + 
sobject="Contact", + operation=DataOperationType.INSERT, + api_options={"batch_size": 2}, + context=context, + fields=["LastName"], + ) + + records = [["Test"], ["Test2"], ["Test3"]] + + csv_rows = [step._serialize_csv_record(step.fields)] + for r in records: + csv_rows.append(step._serialize_csv_record(r)) + + char_limit = sum([len(r) for r in csv_rows]) - 1 + + # Ask for batches of three, but we + # should get batches of 2 back + results = list(step._batch(iter(records), n=3, char_limit=char_limit)) + + assert len(results) == 2 + assert list(results[0]) == [ + '"LastName"\r\n'.encode("utf-8"), + '"Test"\r\n'.encode("utf-8"), + '"Test2"\r\n'.encode("utf-8"), + ] + assert list(results[1]) == [ + '"LastName"\r\n'.encode("utf-8"), + '"Test3"\r\n'.encode("utf-8"), + ] + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_get_results(self, download_mock): + context = mock.Mock() + context.bulk.endpoint = "https://test" + download_mock.side_effect = [ + io.StringIO( + """id,success,created,error +003000000000001,true,true, +003000000000002,true,true,""" + ), + io.StringIO( + """id,success,created,error +003000000000003,false,false,error""" + ), + ] + + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.INSERT, + api_options={}, + context=context, + fields=["LastName"], + ) + step.job_id = "JOB" + step.batch_ids = ["BATCH1", "BATCH2"] + + results = step.get_results() + + assert list(results) == [ + DataOperationResult("003000000000001", True, None, True), + DataOperationResult("003000000000002", True, None, True), + DataOperationResult(None, False, "error", False), + ] + download_mock.assert_has_calls( + [ + mock.call("https://test/job/JOB/batch/BATCH1/result", context.bulk), + mock.call("https://test/job/JOB/batch/BATCH2/result", context.bulk), + ] + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_get_results__failure(self, download_mock): + context = mock.Mock() + context.bulk.endpoint = "https://test" + download_mock.return_value.side_effect = Exception + + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.INSERT, + api_options={}, + context=context, + fields=["LastName"], + ) + step.job_id = "JOB" + step.batch_ids = ["BATCH1", "BATCH2"] + + with pytest.raises(BulkDataException): + list(step.get_results()) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_end_to_end(self, download_mock): + context = mock.Mock() + context.bulk.endpoint = "https://test" + context.bulk.create_job.return_value = "JOB" + context.bulk.post_batch.side_effect = ["BATCH1", "BATCH2"] + download_mock.return_value = io.StringIO( + """id,success,created,error +003000000000001,true,true, +003000000000002,true,true, +003000000000003,false,false,error""" + ) + + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.INSERT, + api_options={}, + context=context, + fields=["LastName"], + ) + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + step.start() + step.load_records(iter([["Test"], ["Test2"], ["Test3"]])) + step.end() + + assert step.job_result.status is DataOperationStatus.SUCCESS + results = step.get_results() + + assert list(results) == [ + DataOperationResult("003000000000001", True, None, True), + DataOperationResult("003000000000002", True, None, True), + DataOperationResult(None, False, "error", False), + ] + + +class TestRestApiQueryOperation: + def test_query(self): + 
context = mock.Mock() + context.sf.query.return_value = { + "totalSize": 2, + "done": True, + "records": [ + { + "Id": "003000000000001", + "LastName": "Narvaez", + "Email": "wayne@example.com", + }, + {"Id": "003000000000002", "LastName": "De Vries", "Email": None}, + ], + } + + query_op = RestApiQueryOperation( + sobject="Contact", + fields=["Id", "LastName", "Email"], + api_options={}, + context=context, + query="SELECT Id, LastName, Email FROM Contact", + ) + + query_op.query() + + assert query_op.job_result == DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 2, 0 + ) + assert list(query_op.get_results()) == [ + ["003000000000001", "Narvaez", "wayne@example.com"], + ["003000000000002", "De Vries", ""], + ] + + def test_query_batches(self): + context = mock.Mock() + context.sf.query.return_value = { + "totalSize": 2, + "done": False, + "records": [ + { + "Id": "003000000000001", + "LastName": "Narvaez", + "Email": "wayne@example.com", + } + ], + "nextRecordsUrl": "test", + } + + context.sf.query_more.return_value = { + "totalSize": 2, + "done": True, + "records": [ + {"Id": "003000000000002", "LastName": "De Vries", "Email": None} + ], + } + + query_op = RestApiQueryOperation( + sobject="Contact", + fields=["Id", "LastName", "Email"], + api_options={}, + context=context, + query="SELECT Id, LastName, Email FROM Contact", + ) + + query_op.query() + + assert query_op.job_result == DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 2, 0 + ) + assert list(query_op.get_results()) == [ + ["003000000000001", "Narvaez", "wayne@example.com"], + ["003000000000002", "De Vries", ""], + ] + + +class TestRestApiDmlOperation: + @responses.activate + def test_insert_dml_operation(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + + recs = [["Fred", "Narvaez"], [None, "De Vries"], ["Hiroko", "Aito"]] + + dml_op = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.INSERT, + api_options={"batch_size": 2}, + context=task, + fields=["FirstName", "LastName"], + ) + + dml_op.start() + dml_op.load_records(iter(recs)) + dml_op.end() + + assert dml_op.job_result == DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 3, 0 + ) + assert list(dml_op.get_results()) == [ + DataOperationResult("003000000000001", True, "", True), + DataOperationResult("003000000000002", True, "", True), + DataOperationResult("003000000000003", True, "", True), + ] + + @responses.activate + def test_get_prev_record_values(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": 
True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + ) + + results = { + "records": [ + {"LastName": "Test1", "Id": "Id1"}, + {"LastName": "Test2", "Id": "Id2"}, + ] + } + expected_record_values = [["Test1", "Id1"], ["Test2", "Id2"]] + expected_relevant_fields = ("Id", "LastName") + step.sf.query = mock.Mock() + step.sf.query.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) + prev_record_values, relevant_fields = step.get_prev_record_values(records) + + assert sorted(map(sorted, prev_record_values)) == sorted( + map(sorted, expected_record_values) + ) + assert set(relevant_fields) == set(expected_relevant_fields) + + @responses.activate + def test_select_records_standard_strategy_success(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + ) + + results = { + "records": [ + {"Id": "003000000000001"}, + ], + "done": True, + } + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 3 + ) + + @responses.activate + def test_select_records_standard_strategy_success_pagination(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + 
sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + ) + + # Set up pagination: First call returns done=False, second call returns done=True + step.sf.restful = mock.Mock( + side_effect=[ + { + "records": [{"Id": "003000000000001"}, {"Id": "003000000000002"}], + "done": False, # Pagination in progress + "nextRecordsUrl": "/services/data/vXX.X/query/next-records", + }, + ] + ) + + step.sf.query_more = mock.Mock( + side_effect=[ + {"records": [{"Id": "003000000000003"}], "done": True} # Final page + ] + ) + + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=False + ) + ) + == 1 + ) + + @responses.activate + def test_select_records_standard_strategy_failure__no_records(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + ) + + results = {"records": [], "done": True} + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the job result and assert its properties for failure scenario + job_result = step.job_result + assert job_result.status == DataOperationStatus.JOB_FAILURE + assert ( + job_result.job_errors[0] + == "No records found for Contact in the target org." 
+ ) + assert job_result.records_processed == 0 + assert job_result.total_row_errors == 0 + + @responses.activate + def test_select_records_user_selection_filter_success(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter='WHERE LastName IN ("Sample Name")', + ) + + results = { + "records": [ + {"Id": "003000000000001"}, + ], + "done": True, + } + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 3 + ) + + @responses.activate + def test_select_records_user_selection_filter_order_success(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter="ORDER BY CreatedDate", + ) + + results = { + "records": [ + {"Id": "003000000000003"}, + {"Id": "003000000000001"}, + {"Id": "003000000000002"}, + ], + "done": True, + } + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results are in the order of user_query + assert results[0].id == "003000000000003" + assert results[1].id == "003000000000001" + assert results[2].id == 
"003000000000002" + + @responses.activate + def test_select_records_user_selection_filter_failure(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter="MALFORMED FILTER", # Applying malformed filter + ) + + step.sf.restful = mock.Mock() + step.sf.restful.side_effect = Exception("MALFORMED QUERY") + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + with pytest.raises(Exception): + step.select_records(records) + + @responses.activate + def test_select_records_similarity_strategy_success(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + results_first_call = { + "records": [ + { + "Id": "003000000000001", + "Name": "Jawad", + "Email": "mjawadtp@example.com", + }, + { + "Id": "003000000000002", + "Name": "Aditya", + "Email": "aditya@example.com", + }, + { + "Id": "003000000000003", + "Name": "Tom Cruise", + "Email": "tomcruise@example.com", + }, + ], + "done": True, + } + + # First call returns `results_first_call`, second call returns an empty list + step.sf.restful = mock.Mock( + side_effect=[results_first_call, {"records": [], "done": True}] + ) + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom Cruise", "tom@example.com"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, 
error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=False + ) + ) + == 1 + ) + + @responses.activate + def test_select_records_similarity_strategy_failure__no_records(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + results = {"records": [], "done": True} + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results + records = iter( + [ + ["Id: 1", "Jawad", "mjawadtp@example.com"], + ["Id: 2", "Aditya", "aditya@example.com"], + ["Id: 2", "Tom", "tom@example.com"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the job result and assert its properties for failure scenario + job_result = step.job_result + assert job_result.status == DataOperationStatus.JOB_FAILURE + assert ( + job_result.job_errors[0] + == "No records found for Contact in the target org." 
+ ) + assert job_result.records_processed == 0 + assert job_result.total_row_errors == 0 + + @responses.activate + def test_select_records_similarity_strategy_parent_level_records__polymorphic(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, ) - - query_op.query() - - assert query_op.job_result == DataOperationJobResult( - DataOperationStatus.SUCCESS, [], 2, 0 + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, ) - assert list(query_op.get_results()) == [ - ["003000000000001", "Narvaez", "wayne@example.com"], - ["003000000000002", "De Vries", ""], - ] - - def test_query_batches(self): - context = mock.Mock() - context.sf.query.return_value = { - "totalSize": 2, - "done": False, - "records": [ - { - "Id": "003000000000001", - "LastName": "Narvaez", - "Email": "wayne@example.com", - } + step = RestApiDmlOperation( + sobject="Event", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=task, + fields=[ + "Subject", + "Who.Contact.Name", + "Who.Contact.Email", + "Who.Lead.Name", + "Who.Lead.Company", + "WhoId", ], - "nextRecordsUrl": "test", - } + selection_strategy=SelectStrategy.SIMILARITY, + ) - context.sf.query_more.return_value = { - "totalSize": 2, - "done": True, - "records": [ - {"Id": "003000000000002", "LastName": "De Vries", "Email": None} - ], - } + step.sf.restful = mock.Mock( + side_effect=[ + { + "records": [ + { + "Id": "003000000000001", + "Subject": "Sample Event 1", + "Who": { + "attributes": {"type": "Contact"}, + "Id": "abcd1234", + "Name": "Sample Contact", + "Email": "contact@example.com", + }, + }, + { + "Id": "003000000000002", + "Subject": "Sample Event 2", + "Who": { + "attributes": {"type": "Lead"}, + "Id": "qwer1234", + "Name": "Sample Lead", + "Company": "Salesforce", + }, + }, + ], + "done": True, + }, + ] + ) - query_op = RestApiQueryOperation( - sobject="Contact", - fields=["Id", "LastName", "Email"], - api_options={}, - context=context, - query="SELECT Id, LastName, Email FROM Contact", + records = iter( + [ + [ + "Sample Event 1", + "Sample Contact", + "contact@example.com", + "", + "", + "poiu1234", + ], + ["Sample Event 2", "", "", "Sample Lead", "Salesforce", "lkjh1234"], + ] ) + step.start() + step.select_records(records) + step.end() - query_op.query() + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 2 # Expect 2 results (matching the input records count) - assert query_op.job_result == DataOperationJobResult( - DataOperationStatus.SUCCESS, [], 2, 0 + # Assert that all results have the expected ID, success, and created values + assert results[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + assert results[1] == DataOperationResult( + id="003000000000002", success=True, error="", created=False ) - assert list(query_op.get_results()) == [ - ["003000000000001", "Narvaez", "wayne@example.com"], - ["003000000000002", "De Vries", ""], - ] - 
-class TestRestApiDmlOperation: @responses.activate - def test_insert_dml_operation(self): + def test_select_records_similarity_strategy_parent_level_records__non_polymorphic( + self, + ): mock_describe_calls() task = _make_task( LoadData, @@ -798,34 +2457,66 @@ def test_insert_dml_operation(self): json=[{"id": "003000000000003", "success": True}], status=200, ) - - recs = [["Fred", "Narvaez"], [None, "De Vries"], ["Hiroko", "Aito"]] - - dml_op = RestApiDmlOperation( + step = RestApiDmlOperation( sobject="Contact", - operation=DataOperationType.INSERT, - api_options={"batch_size": 2}, + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, context=task, - fields=["FirstName", "LastName"], + fields=["Name", "Account.Name", "Account.AccountNumber", "AccountId"], + selection_strategy=SelectStrategy.SIMILARITY, ) - dml_op.start() - dml_op.load_records(iter(recs)) - dml_op.end() + step.sf.restful = mock.Mock( + side_effect=[ + { + "records": [ + { + "Id": "003000000000001", + "Name": "Sample Contact 1", + "Account": { + "attributes": {"type": "Account"}, + "Id": "abcd1234", + "Name": "Sample Account", + "AccountNumber": 123456, + }, + }, + { + "Id": "003000000000002", + "Name": "Sample Contact 2", + "Account": None, + }, + ], + "done": True, + }, + ] + ) - assert dml_op.job_result == DataOperationJobResult( - DataOperationStatus.SUCCESS, [], 3, 0 + records = iter( + [ + ["Sample Contact 3", "Sample Account", "123456", "poiu1234"], + ["Sample Contact 4", "", "", ""], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 2 # Expect 2 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + assert results[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + assert results[1] == DataOperationResult( + id="003000000000002", success=True, error="", created=False ) - assert list(dml_op.get_results()) == [ - DataOperationResult("003000000000001", True, "", True), - DataOperationResult("003000000000002", True, "", True), - DataOperationResult("003000000000003", True, "", True), - ] @responses.activate - def test_get_prev_record_values(self): + def test_select_records_similarity_strategy_priority_fields(self): mock_describe_calls() - task = _make_task( + task_1 = _make_task( LoadData, { "options": { @@ -834,8 +2525,20 @@ def test_get_prev_record_values(self): } }, ) - task.project_config.project__package__api_version = CURRENT_SF_API_VERSION - task._init_task() + task_1.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task_1._init_task() + + task_2 = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task_2.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task_2._init_task() responses.add( responses.POST, @@ -852,32 +2555,341 @@ def test_get_prev_record_values(self): json=[{"id": "003000000000003", "success": True}], status=200, ) + step_1 = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=task_1, + fields=[ + "Name", + "Email", + "Account.Name", + "Account.AccountNumber", + "AccountId", + ], + selection_strategy=SelectStrategy.SIMILARITY, + selection_priority_fields={"Name": "Name", "Email": "Email"}, + ) + + step_2 = RestApiDmlOperation( + sobject="Contact", + 
operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=task_2, + fields=[ + "Name", + "Email", + "Account.Name", + "Account.AccountNumber", + "AccountId", + ], + selection_strategy=SelectStrategy.SIMILARITY, + selection_priority_fields={ + "Account.Name": "Account.Name", + "Account.AccountNumber": "Account.AccountNumber", + }, + ) + + sample_response = [ + { + "records": [ + { + "Id": "003000000000001", + "Name": "Bob The Builder", + "Email": "bob@yahoo.org", + "Account": { + "attributes": {"type": "Account"}, + "Id": "abcd1234", + "Name": "Jawad TP", + "AccountNumber": 567890, + }, + }, + { + "Id": "003000000000002", + "Name": "Tom Cruise", + "Email": "tom@exmaple.com", + "Account": { + "attributes": {"type": "Account"}, + "Id": "qwer1234", + "Name": "Aditya B", + "AccountNumber": 123456, + }, + }, + ], + "done": True, + }, + ] + + step_1.sf.restful = mock.Mock(side_effect=sample_response) + step_2.sf.restful = mock.Mock(side_effect=sample_response) + + records = iter( + [ + ["Bob The Builder", "bob@yahoo.org", "Aditya B", "123456", "poiu1234"], + ] + ) + records_1, records_2 = tee(records) + step_1.start() + step_1.select_records(records_1) + step_1.end() + + step_2.start() + step_2.select_records(records_2) + step_2.end() + + # Get the results and assert their properties + results_1 = list(step_1.get_results()) + results_2 = list(step_2.get_results()) + assert ( + len(results_1) == 1 + ) # Expect 1 results (matching the input records count) + assert ( + len(results_2) == 1 + ) # Expect 1 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + # Prioritizes Name and Email + assert results_1[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + # Prioritizes Account.Name and Account.AccountNumber + assert results_2[0] == DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + + @responses.activate + def test_process_insert_records_success(self): + # Mock describe calls + mock_describe_calls() + + # Create a task and mock project config + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + # Prepare inputs + insert_records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom Cruise", "tomcruise@example.com"], + ] + ) + selected_records = [None, None, None] + + # Mock fields splitting + insert_fields = ["Name", "Email"] + with mock.patch( + "cumulusci.tasks.bulkdata.step.split_and_filter_fields", + return_value=(insert_fields, None), + ) as split_mock: + # Mock the instance of RestApiDmlOperation + mock_rest_api_dml_operation = mock.create_autospec( + RestApiDmlOperation, instance=True + ) + mock_rest_api_dml_operation.results = [ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + {"id": "003000000000003", "success": True}, + ] + + with mock.patch( + "cumulusci.tasks.bulkdata.step.RestApiDmlOperation", + return_value=mock_rest_api_dml_operation, + ): + # Call the function + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.INSERT, + api_options={"batch_size": 10}, + context=task, + fields=["Name", "Email"], + ) + step._process_insert_records(insert_records, selected_records) + + # Assert the mocked splitting is called + 
split_mock.assert_called_once_with(fields=["Name", "Email"]) + + # Validate that `selected_records` is updated correctly + assert selected_records == [ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + {"id": "003000000000003", "success": True}, + ] + + # Validate the operation sequence + mock_rest_api_dml_operation.start.assert_called_once() + mock_rest_api_dml_operation.load_records.assert_called_once_with( + insert_records + ) + mock_rest_api_dml_operation.end.assert_called_once() + + @responses.activate + def test_process_insert_records_failure(self): + # Mock describe calls + mock_describe_calls() + + # Create a task and mock project config + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + # Prepare inputs + insert_records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ] + ) + selected_records = [None, None] + + # Mock fields splitting + insert_fields = ["Name", "Email"] + with mock.patch( + "cumulusci.tasks.bulkdata.step.split_and_filter_fields", + return_value=(insert_fields, None), + ) as split_mock: + # Mock the instance of RestApiDmlOperation + mock_rest_api_dml_operation = mock.create_autospec( + RestApiDmlOperation, instance=True + ) + mock_rest_api_dml_operation.results = ( + None # Simulate no results due to an exception + ) + + # Simulate an exception during processing results + mock_rest_api_dml_operation.load_records.side_effect = BulkDataException( + "Simulated failure" + ) + + with mock.patch( + "cumulusci.tasks.bulkdata.step.RestApiDmlOperation", + return_value=mock_rest_api_dml_operation, + ): + # Call the function and verify that it raises the expected exception + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.INSERT, + api_options={"batch_size": 10}, + context=task, + fields=["Name", "Email"], + ) + with pytest.raises(BulkDataException): + step._process_insert_records(insert_records, selected_records) + + # Assert the mocked splitting is called + split_mock.assert_called_once_with(fields=["Name", "Email"]) + + # Validate that `selected_records` remains unchanged + assert selected_records == [None, None] + + # Validate the operation sequence + mock_rest_api_dml_operation.start.assert_called_once() + mock_rest_api_dml_operation.load_records.assert_called_once_with( + insert_records + ) + mock_rest_api_dml_operation.end.assert_not_called() + + @responses.activate + def test_select_records_similarity_strategy__insert_records(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + # Create step with threshold step = RestApiDmlOperation( sobject="Contact", operation=DataOperationType.UPSERT, - api_options={"batch_size": 10, "update_key": "LastName"}, + api_options={"batch_size": 10}, context=task, - fields=["LastName"], + fields=["Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + threshold=0.3, ) - results = { + results_select_call = { "records": [ - {"LastName": "Test1", "Id": "Id1"}, - {"LastName": "Test2", "Id": "Id2"}, - ] + { + "Id": "003000000000001", + "Name": "Jawad", + "Email": "mjawadtp@example.com", + }, + ], + "done": True, } - 
expected_record_values = [["Test1", "Id1"], ["Test2", "Id2"]] - expected_relevant_fields = ("Id", "LastName") - step.sf.query = mock.Mock() - step.sf.query.return_value = results - records = iter([["Test1"], ["Test2"], ["Test3"]]) - prev_record_values, relevant_fields = step.get_prev_record_values(records) - assert sorted(map(sorted, prev_record_values)) == sorted( - map(sorted, expected_record_values) + results_insert_call = [ + {"id": "003000000000002", "success": True, "created": True}, + {"id": "003000000000003", "success": True, "created": True}, + ] + + step.sf.restful = mock.Mock( + side_effect=[results_select_call, results_insert_call] + ) + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom Cruise", "tom@example.com"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=True + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=True + ) + ) + == 1 ) - assert set(relevant_fields) == set(expected_relevant_fields) @responses.activate def test_insert_dml_operation__boolean_conversion(self): @@ -1355,6 +3367,8 @@ def test_get_dml_operation(self, rest_dml, bulk_dml): context=context, api=DataApi.BULK, volume=1, + selection_strategy=SelectStrategy.SIMILARITY, + selection_filter=None, ) assert op == bulk_dml.return_value @@ -1364,6 +3378,11 @@ def test_get_dml_operation(self, rest_dml, bulk_dml): fields=["Name"], api_options={}, context=context, + selection_strategy=SelectStrategy.SIMILARITY, + selection_filter=None, + selection_priority_fields=None, + content_type=None, + threshold=None, ) op = get_dml_operation( @@ -1374,6 +3393,8 @@ def test_get_dml_operation(self, rest_dml, bulk_dml): context=context, api=DataApi.REST, volume=1, + selection_strategy=SelectStrategy.SIMILARITY, + selection_filter=None, ) assert op == rest_dml.return_value @@ -1383,6 +3404,11 @@ def test_get_dml_operation(self, rest_dml, bulk_dml): fields=["Name"], api_options={}, context=context, + selection_strategy=SelectStrategy.SIMILARITY, + selection_filter=None, + selection_priority_fields=None, + content_type=None, + threshold=None, ) @mock.patch("cumulusci.tasks.bulkdata.step.BulkApiDmlOperation") @@ -1545,3 +3571,122 @@ def test_cleanup_date_strings__upsert_update(self, operation): "Name": "Bill", "attributes": {"type": "Test__c"}, }, json_out + + +@pytest.mark.parametrize( + "query_fields, expected", + [ + # Test with simple field names + (["Id", "Name", "Email"], ["Id", "Name", "Email"]), + # Test with TYPEOF fields (polymorphic fields) + ( + [ + "Subject", + { + "Who": [ + {"Contact": ["Name", "Email"]}, + {"Lead": ["Name", "Company"]}, + ] + }, + ], + [ + "Subject", + "Who.Contact.Name", + "Who.Contact.Email", + "Who.Lead.Name", + "Who.Lead.Company", + ], + ), + # Test with mixed simple and TYPEOF fields + ( + ["Subject", {"Who": [{"Contact": ["Email"]}]}, "Account.Name"], + ["Subject", "Who.Contact.Email", "Account.Name"], + ), + # Test with an empty list + ([], []), + ], +) +def 
test_extract_flattened_headers(query_fields, expected): + result = extract_flattened_headers(query_fields) + assert result == expected + + +@pytest.mark.parametrize( + "record, headers, expected", + [ + # Test with simple field matching + ( + {"Id": "001", "Name": "John Doe", "Email": "john@example.com"}, + ["Id", "Name", "Email"], + ["001", "John Doe", "john@example.com"], + ), + # Test with lookup fields and missing values + ( + { + "Who": { + "attributes": {"type": "Contact"}, + "Name": "Jane Doe", + "Email": "johndoe@org.com", + "Number": 10, + } + }, + ["Who.Contact.Name", "Who.Contact.Email", "Who.Contact.Number"], + ["Jane Doe", "johndoe@org.com", "10"], + ), + # Test with non-matching ref_obj type + ( + {"Who": {"attributes": {"type": "Contact"}, "Email": "jane@contact.com"}}, + ["Who.Lead.Email"], + [""], + ), + # Test with mixed fields and nested lookups + ( + { + "Who": {"attributes": {"type": "Lead"}, "Name": "John Doe"}, + "Email": "john@example.com", + }, + ["Who.Lead.Name", "Who.Lead.Company", "Email"], + ["John Doe", "", "john@example.com"], + ), + # Test with mixed fields and nested lookups + ( + { + "Who": {"attributes": {"type": "Lead"}, "Name": "John Doe"}, + "Email": "john@example.com", + }, + ["What.Account.Name"], + [""], + ), + # Test with empty record + ({}, ["Id", "Name"], ["", ""]), + ], +) +def test_flatten_record(record, headers, expected): + result = flatten_record(record, headers) + assert result == expected + + +@pytest.mark.parametrize( + "priority_fields, fields, expected", + [ + # Test with priority fields matching + ( + {"Id": "Id", "Name": "Name"}, + ["Id", "Name", "Email"], + [HIGH_PRIORITY_VALUE, HIGH_PRIORITY_VALUE, LOW_PRIORITY_VALUE], + ), + # Test with no priority fields provided + (None, ["Id", "Name", "Email"], [1, 1, 1]), + # Test with empty priority fields dictionary + ({}, ["Id", "Name", "Email"], [1, 1, 1]), + # Test with some fields not in priority_fields + ( + {"Id": "Id"}, + ["Id", "Name", "Email"], + [HIGH_PRIORITY_VALUE, LOW_PRIORITY_VALUE, LOW_PRIORITY_VALUE], + ), + ], +) +def test_assign_weights(priority_fields, fields, expected): + result = assign_weights(priority_fields, fields) + assert result == expected diff --git a/cumulusci/tasks/bulkdata/tests/utils.py b/cumulusci/tasks/bulkdata/tests/utils.py index 173f4c6122..c0db0f9515 100644 --- a/cumulusci/tasks/bulkdata/tests/utils.py +++ b/cumulusci/tasks/bulkdata/tests/utils.py @@ -98,6 +98,9 @@ def get_prev_record_values(self, records): def load_records(self, records): self.records.extend(records) + def select_records(self, records): + pass + def get_results(self): return iter(self.results) diff --git a/cumulusci/tasks/bulkdata/utils.py b/cumulusci/tasks/bulkdata/utils.py index 082277fb16..cee6a4ab66 100644 --- a/cumulusci/tasks/bulkdata/utils.py +++ b/cumulusci/tasks/bulkdata/utils.py @@ -5,15 +5,38 @@ from contextlib import contextmanager, nullcontext from pathlib import Path +from requests.structures import CaseInsensitiveDict as RequestsCaseInsensitiveDict from simple_salesforce import Salesforce from sqlalchemy import Boolean, Column, MetaData, Table, Unicode, inspect from sqlalchemy.engine.base import Connection from sqlalchemy.orm import Session, mapper +from cumulusci.core.enums import StrEnum from cumulusci.core.exceptions import BulkDataException from cumulusci.utils.iterators import iterate_in_chunks +class DataApi(StrEnum): + """Enum defining requested Salesforce data API for an operation.""" + + BULK = "bulk" + REST = "rest" + SMART = "smart" + + +class 
CaseInsensitiveDict(RequestsCaseInsensitiveDict): + def __init__(self, *args, **kwargs): + self._canonical_keys = {} + super().__init__(*args, **kwargs) + + def canonical_key(self, name): + return self._canonical_keys[name.lower()] + + def __setitem__(self, key, value): + super().__setitem__(key, value) + self._canonical_keys[key.lower()] = key + + class SqlAlchemyMixin: logger: logging.Logger metadata: MetaData diff --git a/cumulusci/tasks/command.py b/cumulusci/tasks/command.py index 2de71f8577..1935fc3e6f 100644 --- a/cumulusci/tasks/command.py +++ b/cumulusci/tasks/command.py @@ -146,7 +146,7 @@ class SalesforceCommand(Command): """Execute a Command with SF credentials provided on the environment. Provides: - * SF_INSTANCE_URL + * SF_ORG_INSTANCE_URL * SF_ACCESS_TOKEN """ @@ -158,7 +158,7 @@ def _update_credentials(self): def _get_env(self): env = super(SalesforceCommand, self)._get_env() env["SF_ACCESS_TOKEN"] = self.org_config.access_token - env["SF_INSTANCE_URL"] = self.org_config.instance_url + env["SF_ORG_INSTANCE_URL"] = self.org_config.instance_url return env diff --git a/cumulusci/tasks/connectedapp.py b/cumulusci/tasks/connectedapp.py index 2566e3b13a..706c8132fc 100644 --- a/cumulusci/tasks/connectedapp.py +++ b/cumulusci/tasks/connectedapp.py @@ -47,7 +47,7 @@ class CreateConnectedApp(SFDXBaseTask): "description": "The email address to associate with the connected app. Defaults to email address from the github service if configured." }, "username": { - "description": "Create the connected app in a different org. Defaults to the defaultdevhubusername configured in sfdx.", + "description": "Create the connected app in a different org. Defaults to the target-dev-hub configured in sfdx.", "required": False, }, "connect": { @@ -63,7 +63,7 @@ class CreateConnectedApp(SFDXBaseTask): def _init_options(self, kwargs): self.client_id = None self.client_secret = None - kwargs["command"] = "force:mdapi:deploy --wait {}".format(self.deploy_wait) + kwargs["command"] = "project deploy start --wait {}".format(self.deploy_wait) super(CreateConnectedApp, self)._init_options(kwargs) # Validate label @@ -91,7 +91,7 @@ def _set_default_username(self): self.logger.info("Getting username for the default devhub from sfdx") output = [] self._run_command( - command="{} force:config:get defaultdevhubusername --json".format(SFDX_CLI), + command="{} force config get target-dev-hub --json".format(SFDX_CLI), env=self._get_env(), output_handler=output.append, ) @@ -109,7 +109,7 @@ def _process_devhub_output(self, output): data = self._process_json_output(output) if "value" not in data["result"][0]: raise TaskOptionsError( - "No sfdx config found for defaultdevhubusername. Please use the sfdx force:config:set to set the defaultdevhubusername and run again" + "No sfdx config found for target-dev-hub. 
Please use `sf force config set` to set the target-dev-hub and run again"
             )
         self.options["username"] = data["result"][0]["value"]
@@ -166,11 +166,11 @@ def _connect_service(self):
 
     def _get_command(self):
         command = super()._get_command()
-        # Default to sfdx defaultdevhubusername
+        # Default to sf target-dev-hub
        if "username" not in self.options:
            self._set_default_username()
-        command += " -u {}".format(self.options.get("username"))
-        command += " -d {}".format(self.tempdir)
+        command += " -o {}".format(self.options.get("username"))
+        command += " --metadata-dir {}".format(self.tempdir)
         return command
 
     def _run_task(self):
diff --git a/cumulusci/tasks/create_package_version.py b/cumulusci/tasks/create_package_version.py
index 4104e2c0f7..14a80334fc 100644
--- a/cumulusci/tasks/create_package_version.py
+++ b/cumulusci/tasks/create_package_version.py
@@ -228,9 +228,7 @@ def _run_task(self):
         package_zip_builder = None
         with convert_sfdx_source(
             self.project_config.default_package_path,
-            None
-            if self.package_config.package_type == PackageTypeEnum.unlocked
-            else self.package_config.package_name,
+            None,
             self.logger,
         ) as path:
             package_zip_builder = MetadataPackageZipBuilder(
diff --git a/cumulusci/tasks/dx_convert_from.py b/cumulusci/tasks/dx_convert_from.py
index ff242ce91e..45d9f1bd0a 100644
--- a/cumulusci/tasks/dx_convert_from.py
+++ b/cumulusci/tasks/dx_convert_from.py
@@ -16,8 +16,8 @@ class DxConvertFrom(SFDXBaseTask):
     def _init_options(self, kwargs):
         super()._init_options(kwargs)
-        # append command -d option to sfdx force:source:convert
-        self.options["command"] = f"force:source:convert -d {self.options['src_dir']}"
+        # append command -d option to sf project convert source
+        self.options["command"] = f"project convert source -d {self.options['src_dir']}"
 
     def _run_task(self):
         src_dir = Path(self.options["src_dir"])
diff --git a/cumulusci/tasks/github/tests/test_release.py b/cumulusci/tasks/github/tests/test_release.py
index f528e89820..abcb23f645 100644
--- a/cumulusci/tasks/github/tests/test_release.py
+++ b/cumulusci/tasks/github/tests/test_release.py
@@ -3,6 +3,7 @@
 
 import pytest
 import responses
+from responses.matchers import json_params_matcher
 
 from cumulusci.core.config import ServiceConfig, TaskConfig
 from cumulusci.core.exceptions import GithubException, TaskOptionsError
@@ -354,7 +355,7 @@ def test_run_task__with_beta_2gp(self):
             url=self.repo_api_url + "/releases",
             json=self._get_expected_release("release"),
             match=[
-                responses.json_params_matcher(
+                json_params_matcher(
                     {
                         "tag_name": "beta/1.1",
                         "name": "1.1",
diff --git a/cumulusci/tasks/metadata/package.py b/cumulusci/tasks/metadata/package.py
index 458f742ae7..a1e546ae3b 100644
--- a/cumulusci/tasks/metadata/package.py
+++ b/cumulusci/tasks/metadata/package.py
@@ -4,6 +4,7 @@
 import urllib.parse
 from logging import Logger, getLogger
 from pathlib import Path
+from typing import Dict, List
 
 import yaml
 
@@ -40,6 +41,28 @@ def metadata_sort_key_section(name):
     return key
 
 
+def process_common_components(response_messages: List, components: Dict):
+    """Compare components in the API response with the given component dict and return the common components."""
+    if not response_messages or not components:
+        return components
+
+    for message in response_messages:
+        message_list = message.firstChild.nextSibling.firstChild.nodeValue.split("'")
+        if len(message_list) > 1:
+            component_type = message_list[1]
+            message_txt = message_list[2]
+
+            if "is not available in this organization" in message_txt:
+                del components[component_type]
+            else:
+                component_name = message_list[3]
+                if component_name in components[component_type]:
+                    components[component_type].remove(component_name)
+                    if len(components[component_type]) == 0:
+                        del components[component_type]
+    return components
+
+
 class MetadataParserMissingError(Exception):
     pass
diff --git a/cumulusci/tasks/metadata/tests/test_package.py b/cumulusci/tasks/metadata/tests/test_package.py
index 0305e23a88..4ba10fba43 100644
--- a/cumulusci/tasks/metadata/tests/test_package.py
+++ b/cumulusci/tasks/metadata/tests/test_package.py
@@ -2,6 +2,7 @@
 from unittest import mock
 
 import pytest
+from defusedxml.minidom import parseString
 
 from cumulusci.core.config import (
     BaseProjectConfig,
@@ -27,6 +28,7 @@
     RecordTypeParser,
     UpdatePackageXml,
     metadata_sort_key,
+    process_common_components,
 )
 from cumulusci.utils import temporary_dir, touch
 
@@ -398,3 +400,77 @@ def test_run_task(self):
         with open(output_path, "r") as f:
             result = f.read()
         assert expected == result
+
+
+class TestProcessComponents:
+    response = """
+    
+    
+    
+    true
+    
+    0058N000006PycGQAS
+    User User
+    2024-10-08T22:54:34.372Z
+    unpackaged/labels/CustomLabels.labels
+    CustomLabels
+    000000000000000AAA
+    0058N000006PycGQAS
+    User User
+    2024-10-08T22:54:34.372Z
+    CustomLabels
+    
+    09S8N000002vlujUAA
+    
+    Entity of type 'ApexClass' 'TestClass' cannot be found
+    unpackaged/package.xml
+    
+    
+    Entity of type 'CustomObject' 'TestObject' cannot be found
+    unpackaged/package.xml
+    
+    
+    Entity of type 'CustomObject' 'AnotherObject' cannot be found
+    unpackaged/package.xml
+    
+    
+    """
+
+    def test_process_common_components(self):
+        response_messages = parseString(self.response).getElementsByTagName("messages")
+
+        components = {
+            "ApexClass": {"TestClass", "AnotherClass"},
+            "CustomObject": {"TestObject", "AnotherObject"},
+        }
+
+        result = process_common_components(response_messages, components)
+
+        expected_components = {
+            "ApexClass": {"AnotherClass"},
+        }
+
+        assert result == expected_components
+        assert "ApexClass" in result
+        assert "AnotherClass" in result["ApexClass"]
+        assert "TestClass" not in result["ApexClass"]
+        assert "CustomObject" not in result
+
+    def test_process_common_components_no_response_messages(self):
+        components = {
+            "ApexClass": {"TestClass", "AnotherClass"},
+            "CustomObject": {"TestObject", "AnotherObject"},
+        }
+
+        result = process_common_components([], components)
+
+        # If there are no response messages, the components dict should remain unchanged
+        assert result == components
+
+    def test_process_common_components_no_components(self):
+        response_messages = parseString(self.response).getElementsByTagName("messages")
+        result = process_common_components(response_messages, {})
+        assert result == {}
diff --git a/cumulusci/tasks/metadata_etl/tests/test_value_sets.py b/cumulusci/tasks/metadata_etl/tests/test_value_sets.py
index 5f30efd8ce..af7d537eff 100644
--- a/cumulusci/tasks/metadata_etl/tests/test_value_sets.py
+++ b/cumulusci/tasks/metadata_etl/tests/test_value_sets.py
@@ -38,8 +38,8 @@ def test_adds_entry(self):
                 "api_version": "47.0",
                 "api_names": "bar,foo",
                 "entries": [
-                    {"fullName": "Test", "label": "Label"},
-                    {"fullName": "Test_2", "label": "Label 2"},
+                    {"fullName": "Test", "label": "Label", "group": "Schedule"},
+                    {"fullName": "Test_2", "label": "Label 2", "default": "true"},
                 ],
             },
         )
@@ -57,6 +57,9 @@ def test_adds_entry(self):
         assert len(entry) == 1
         label = entry[0].findall(f".//{MD}label")
         assert len(label) == 1
+        group = entry[0].findall(f".//{MD}group")
+        assert group[0].text == "Schedule"
+        assert len(group) == 1
         assert label[0].text == "Label"
         default = entry[0].findall(f".//{MD}default")
         assert len(default) == 1
diff --git a/cumulusci/tasks/metadata_etl/value_sets.py b/cumulusci/tasks/metadata_etl/value_sets.py
index 06f79e32f4..91d9612004 100644
--- a/cumulusci/tasks/metadata_etl/value_sets.py
+++ b/cumulusci/tasks/metadata_etl/value_sets.py
@@ -88,15 +88,19 @@ def _transform_entity(self, metadata: MetadataElement, api_name: str):
 
                 elem.append("default", text="false")
 
-            if api_name in ["OpportunityStage", "CaseStatus"]:
+            if api_name == "CaseStatus":
                 elem.append("closed", str(entry["closed"]).lower())
-            if api_name == "OpportunityStage":
+            elif api_name == "OpportunityStage":
                 elem.append("won", str(entry["won"]).lower())
+                elem.append("closed", str(entry["closed"]).lower())
                 elem.append("probability", str(entry["probability"]))
                 elem.append("forecastCategory", entry["forecastCategory"])
-            if api_name == "LeadStatus":
+            elif api_name == "LeadStatus":
                 elem.append("converted", str(entry["converted"]).lower())
-
+            else:
+                for entry_key in entry:
+                    if entry_key not in ["fullName", "label", "default"]:
+                        elem.append(entry_key, str(entry[entry_key]))
         return metadata
diff --git a/cumulusci/tasks/preflight/dataset_load.py b/cumulusci/tasks/preflight/dataset_load.py
new file mode 100644
index 0000000000..41a89b5cd4
--- /dev/null
+++ b/cumulusci/tasks/preflight/dataset_load.py
@@ -0,0 +1,49 @@
+from cumulusci.core.datasets import Dataset
+from cumulusci.core.exceptions import BulkDataException
+from cumulusci.tasks.bulkdata.mapping_parser import (
+    parse_from_yaml,
+    validate_and_inject_mapping,
+)
+from cumulusci.tasks.bulkdata.step import DataOperationType
+from cumulusci.tasks.salesforce import BaseSalesforceApiTask
+
+
+class LoadDataSetCheck(BaseSalesforceApiTask):
+    task_docs = """
+    A preflight check to ensure a dataset can be loaded successfully
+    """
+    task_options = {
+        "dataset": {
+            "description": "Dataset on which preflight checks need to be performed",
+            "required": False,
+        },
+    }
+
+    def _init_options(self, kwargs):
+        super(BaseSalesforceApiTask, self)._init_options(kwargs)
+        if "dataset" not in self.options:
+            self.options["dataset"] = "default"
+
+    def _run_task(self):
+        mapping_file_path = Dataset(
+            self.options["dataset"],
+            self.project_config,
+            self.sf,
+            self.org_config,
+            schema=None,
+        ).mapping_file
+        self.mapping = parse_from_yaml(mapping_file_path)
+        try:
+            validate_and_inject_mapping(
+                mapping=self.mapping,
+                sf=self.sf,
+                namespace=self.project_config.project__package__namespace,
+                data_operation=DataOperationType.INSERT,
+                inject_namespaces=True,
+                drop_missing=False,
+            )
+            self.return_values = True
+        except BulkDataException as e:
+            self.logger.error(e)
+            self.return_values = False
+        return self.return_values
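As a usage sketch (not part of the change, and assuming the same `create_task` helper that the new tests below import), the preflight check can be exercised directly:

    from cumulusci.tasks.preflight.dataset_load import LoadDataSetCheck
    from cumulusci.tasks.salesforce.tests.util import create_task

    # Minimal sketch, assuming a project that defines a "default" dataset:
    task = create_task(LoadDataSetCheck, {"dataset": "default"})
    ok = task()  # True when every mapping step validates against the target org

The boolean `return_values` is what makes the task usable as a plan preflight condition.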
diff --git a/cumulusci/tasks/preflight/permsets.py b/cumulusci/tasks/preflight/permsets.py
index fe7adf39d8..9e84787398 100644
--- a/cumulusci/tasks/preflight/permsets.py
+++ b/cumulusci/tasks/preflight/permsets.py
@@ -9,11 +9,14 @@ def _run_task(self):
         for result in self.sf.query_all(query)["records"]:
             if result["PermissionSet"]["Name"] not in self.return_values:
                 self.return_values.append(result["PermissionSet"]["Name"])
-
             if result["PermissionSetGroupId"] is not None:
                 psg_query = f"SELECT PermissionSet.Name from PermissionSetGroupComponent where PermissionSetGroupId = '{result['PermissionSetGroupId']}'"
                 for psg_result in self.sf.query_all(psg_query)["records"]:
-                    if psg_result["PermissionSet"]["Name"] not in self.return_values:
+                    if (
+                        psg_result["PermissionSet"]
+                        and psg_result["PermissionSet"]["Name"]
+                        not in self.return_values
+                    ):
                         self.return_values.append(psg_result["PermissionSet"]["Name"])
 
         permsets_str = "\n".join(self.return_values)
diff --git a/cumulusci/tasks/preflight/tests/test_dataset_load.py b/cumulusci/tasks/preflight/tests/test_dataset_load.py
new file mode 100644
index 0000000000..b57b2a4acc
--- /dev/null
+++ b/cumulusci/tasks/preflight/tests/test_dataset_load.py
@@ -0,0 +1,85 @@
+from unittest import mock
+
+import pytest
+
+from cumulusci.core.exceptions import BulkDataException
+from cumulusci.tasks.bulkdata.mapping_parser import MappingStep
+from cumulusci.tasks.bulkdata.step import DataApi, DataOperationType
+from cumulusci.tasks.preflight.dataset_load import LoadDataSetCheck
+from cumulusci.tasks.salesforce.tests.util import create_task
+
+
+class TestLoadDataSetCheck:
+    @mock.patch(
+        "cumulusci.tasks.preflight.dataset_load.validate_and_inject_mapping",
+        return_value=True,
+    )
+    def test_run_task(self, validate_and_inject_mapping):
+        task = create_task(LoadDataSetCheck, {})
+        assert task()
+        assert task.options["dataset"] == "default"
+        assert task.mapping == {
+            "Account": MappingStep(
+                sf_object="Account",
+                table="Account",
+                fields={
+                    "Name": "Name",
+                    "Description": "Description",
+                    "ShippingStreet": "ShippingStreet",
+                    "ShippingCity": "ShippingCity",
+                    "ShippingState": "ShippingState",
+                    "ShippingPostalCode": "ShippingPostalCode",
+                    "ShippingCountry": "ShippingCountry",
+                    "Phone": "Phone",
+                    "AccountNumber": "AccountNumber",
+                },
+                lookups={},
+                static={},
+                filters=[],
+                action=DataOperationType.INSERT,
+                api=DataApi.BULK,
+                batch_size=1,
+                oid_as_pk=False,
+                record_type=None,
+                bulk_mode=None,
+                anchor_date=None,
+                soql_filter=None,
+                update_key=(),
+            ),
+            "Contact": MappingStep(
+                sf_object="Contact",
+                table="Contact",
+                fields={"FirstName": "FirstName"},
+                lookups={},
+                static={},
+                filters=[],
+                action=DataOperationType.INSERT,
+                api=DataApi.BULK,
+                batch_size=1,
+                oid_as_pk=False,
+                record_type=None,
+                bulk_mode=None,
+                anchor_date=None,
+                soql_filter=None,
+                update_key=(),
+            ),
+        }
+
+    def test_mapping_file_not_found(self):
+        task = create_task(LoadDataSetCheck, {"dataset": "alpha"})
+        with pytest.raises(Exception) as e:
+            task()
+        assert "No such file or directory" in str(e.value)
+        assert task.options["dataset"] == "alpha"
+
+    @mock.patch(
+        "cumulusci.tasks.preflight.dataset_load.validate_and_inject_mapping",
+        side_effect=BulkDataException("An error occurred during validation"),
+    )
+    def test_run_fail(self, validate_and_inject_mapping):
+        task = create_task(LoadDataSetCheck, {})
+        task.logger = mock.Mock()
+        assert not task()
+        task.logger.error.assert_called_once()
+        assert (
+            str(task.logger.error.call_args[0][0])
+            == "An error occurred during validation"
+        )
diff --git a/cumulusci/tasks/release_notes/parser.py b/cumulusci/tasks/release_notes/parser.py
index 3e307cfaa7..1fef5d4e16 100644
--- a/cumulusci/tasks/release_notes/parser.py
+++ b/cumulusci/tasks/release_notes/parser.py
@@ -186,7 +186,7 @@ class GithubIssuesParser(IssuesParser):
 
     def __new__(cls, release_notes_generator, title, issue_regex=None):
         if not release_notes_generator.has_issues:
-            logging.getLogger(__file__).warn(
+            logging.getLogger(__file__).warning(
                 "Issues are disabled for this repository. Falling back to change notes parser."
) return GithubLinesParser(release_notes_generator, title) diff --git a/cumulusci/tasks/robotframework/libdoc.py b/cumulusci/tasks/robotframework/libdoc.py index 876a09cf39..b9e0ad37d0 100644 --- a/cumulusci/tasks/robotframework/libdoc.py +++ b/cumulusci/tasks/robotframework/libdoc.py @@ -123,7 +123,7 @@ def _run_task(self): if library_name.endswith(".robot"): libdoc = ResourceDocBuilder().build(library_name) else: - libdoc = DocumentationBuilder(library_name).build(library_name) + libdoc = DocumentationBuilder().build(library_name) kwfile.add_keywords(libdoc) # if we get here, we were able to process the file correctly diff --git a/cumulusci/tasks/robotframework/tests/test_robotframework.py b/cumulusci/tasks/robotframework/tests/test_robotframework.py index 0a9a4e7a74..537001b888 100644 --- a/cumulusci/tasks/robotframework/tests/test_robotframework.py +++ b/cumulusci/tasks/robotframework/tests/test_robotframework.py @@ -762,7 +762,7 @@ def test_pageobject_docstring(self): class TestRobotPerformanceKeywords: - def setup(self): + def setup_method(self): self.datadir = os.path.dirname(__file__) @contextmanager diff --git a/cumulusci/tasks/salesforce/BaseRetrieveMetadata.py b/cumulusci/tasks/salesforce/BaseRetrieveMetadata.py index b8bb7da5f5..65e5a8be42 100644 --- a/cumulusci/tasks/salesforce/BaseRetrieveMetadata.py +++ b/cumulusci/tasks/salesforce/BaseRetrieveMetadata.py @@ -1,4 +1,5 @@ import functools +from zipfile import ZipFile from cumulusci.tasks.salesforce.BaseSalesforceMetadataApiTask import ( BaseSalesforceMetadataApiTask, @@ -52,6 +53,6 @@ def _process_namespace(self, src_zip): ) return src_zip - def _extract_zip(self, src_zip): + def _extract_zip(self, src_zip: ZipFile): src_zip = self._process_namespace(src_zip) src_zip.extractall(self.options["path"]) diff --git a/cumulusci/tasks/salesforce/Deploy.py b/cumulusci/tasks/salesforce/Deploy.py index bf27b86f53..66a5851b80 100644 --- a/cumulusci/tasks/salesforce/Deploy.py +++ b/cumulusci/tasks/salesforce/Deploy.py @@ -16,6 +16,7 @@ from cumulusci.salesforce_api.metadata import ApiDeploy, ApiRetrieveUnpackaged from cumulusci.salesforce_api.package_zip import MetadataPackageZipBuilder from cumulusci.salesforce_api.rest_deploy import RestDeploy +from cumulusci.tasks.metadata.package import process_common_components from cumulusci.tasks.salesforce.BaseSalesforceMetadataApiTask import ( BaseSalesforceMetadataApiTask, ) @@ -169,38 +170,21 @@ def _create_api_object(self, package_xml, api_version): return api_retrieve_unpackaged_object def _collision_check(self, src_path): - xml_map = {} is_collision = False package_xml = open(f"{src_path}/package.xml", "r") source_xml_tree = metadata_tree.parse(f"{src_path}/package.xml") - for type in source_xml_tree.types: - members = [] - try: - for member in type.members: - members.append(member.text) - except AttributeError: # Exception if there are no members for a type - pass - xml_map[type["name"].text] = members - api_retrieve_unpackaged_response = self._create_api_object( package_xml.read(), source_xml_tree.version.text ) + xml_map = metadata_tree.parse_package_xml_types("name", source_xml_tree) + messages = parseString( api_retrieve_unpackaged_response._get_response().content ).getElementsByTagName("messages") - for i in range(len(messages)): - # print(messages[i]) - message_list = messages[ - i - ].firstChild.nextSibling.firstChild.nodeValue.split("'") - - if message_list[3] in xml_map[message_list[1]]: - xml_map[message_list[1]].remove(message_list[3]) - if len(xml_map[message_list[1]]) == 0: - 
del xml_map[message_list[1]]
 
+        process_common_components(messages, xml_map)
 
         for type, api_names in xml_map.items():
             if len(api_names) != 0:
diff --git a/cumulusci/tasks/salesforce/EnsureRecordTypes.py b/cumulusci/tasks/salesforce/EnsureRecordTypes.py
index e9db3dfff4..c7e793f09b 100644
--- a/cumulusci/tasks/salesforce/EnsureRecordTypes.py
+++ b/cumulusci/tasks/salesforce/EnsureRecordTypes.py
@@ -1,5 +1,6 @@
 import os
 import re
+from xml.sax.saxutils import escape
 
 from cumulusci.core.exceptions import TaskOptionsError
 from cumulusci.core.utils import process_bool_arg
@@ -152,7 +153,7 @@ def _build_package(self):
                 record_type_developer_name=self.options[
                     "record_type_developer_name"
                 ],
-                stage_name=self.options["stage_name"],
+                stage_name=escape(self.options["stage_name"]),
                 default=default,
             )
             business_process_link = BUSINESS_PROCESS_LINK.format(
diff --git a/cumulusci/tasks/salesforce/RetrievePackaged.py b/cumulusci/tasks/salesforce/RetrievePackaged.py
index 25ddddf57e..6a24d1f5e4 100644
--- a/cumulusci/tasks/salesforce/RetrievePackaged.py
+++ b/cumulusci/tasks/salesforce/RetrievePackaged.py
@@ -1,3 +1,5 @@
+from zipfile import ZipFile
+
 from cumulusci.salesforce_api.metadata import ApiRetrievePackaged
 from cumulusci.tasks.salesforce import BaseRetrieveMetadata
 from cumulusci.utils import zip_subfolder
@@ -34,6 +36,6 @@ def _get_api(self):
             self, self.options["package"], self.options.get("api_version")
         )
 
-    def _extract_zip(self, src_zip):
+    def _extract_zip(self, src_zip: ZipFile):
         src_zip = zip_subfolder(src_zip, self.options.get("package"))
         super(RetrievePackaged, self)._extract_zip(src_zip)
diff --git a/cumulusci/tasks/salesforce/RetrieveUnpackaged.py b/cumulusci/tasks/salesforce/RetrieveUnpackaged.py
index df9a3f186d..b9a5695eaf 100644
--- a/cumulusci/tasks/salesforce/RetrieveUnpackaged.py
+++ b/cumulusci/tasks/salesforce/RetrieveUnpackaged.py
@@ -27,11 +27,10 @@ def _init_options(self, kwargs):
         super(RetrieveUnpackaged, self)._init_options(kwargs)
 
         if "package_xml" in self.options:
-            self.options["package_xml_path"] = self.options["package_xml"]
-            with open(self.options["package_xml_path"], "r") as f:
-                self.options["package_xml"] = f.read()
+            with open(self.options["package_xml"], "r") as f:
+                self.options["package_xml_content"] = f.read()
 
     def _get_api(self):
         return self.api_class(
-            self, self.options["package_xml"], self.options.get("api_version")
+            self, self.options["package_xml_content"], self.options.get("api_version")
         )
diff --git a/cumulusci/tasks/salesforce/check_components.py b/cumulusci/tasks/salesforce/check_components.py
new file mode 100644
index 0000000000..54b797f312
--- /dev/null
+++ b/cumulusci/tasks/salesforce/check_components.py
@@ -0,0 +1,279 @@
+import json
+import os
+import shutil
+import tempfile
+from collections import defaultdict
+from xml.etree.ElementTree import ParseError
+
+from defusedxml.minidom import parseString
+
+from cumulusci.core.config import FlowConfig, TaskConfig
+from cumulusci.core.exceptions import TaskOptionsError
+from cumulusci.core.flowrunner import FlowCoordinator
+from cumulusci.core.sfdx import convert_sfdx_source
+from cumulusci.core.utils import process_list_arg
+from cumulusci.salesforce_api.metadata import ApiRetrieveUnpackaged
+from cumulusci.tasks.metadata.package import process_common_components
+from cumulusci.tasks.salesforce import BaseSalesforceTask
+from cumulusci.utils import cd
+from cumulusci.utils.xml import metadata_tree
+
+
+class CheckComponents(BaseSalesforceTask):
+    api_retrieve_unpackaged = ApiRetrieveUnpackaged
+    task_options = {
+        "paths": {
+            "description": "List of deploy paths to check",
+            "required": False,
+        },
+        "name": {
+            "description": "The name of the current plan or flow to detect deploy paths",
+            "required": False,
+        },
+    }
+    deploy_paths = []
+
+    def _init_options(self, kwargs):
+        super(CheckComponents, self)._init_options(kwargs)
+        if "paths" in self.options and "name" in self.options:
+            raise TaskOptionsError("Please provide either --paths or --name")
+        if "paths" not in self.options and "name" not in self.options:
+            raise TaskOptionsError(
+                "This task requires a plan/flow name or a paths option. Pass --paths or --name."
+            )
+
+    def _run_task(self):
+        # If paths are provided in the options, check only those paths
+        paths = self.options.get("paths")
+        plan_or_flow_name = self.options.get("name")
+
+        if paths:
+            paths = process_list_arg(paths)
+            self.logger.info(f"Using provided paths: {paths}")
+            self.deploy_paths = paths
+        elif plan_or_flow_name:
+            # If no paths were provided, detect deploy paths from the named plan or flow
+            is_plan = self._is_plan(plan_or_flow_name)
+            if is_plan is None:
+                raise TaskOptionsError(
+                    f"Plan or flow name '{plan_or_flow_name}' not found"
+                )
+
+            self.logger.info(
+                f"Analyzing project {'plan' if is_plan else 'flow'}: {plan_or_flow_name}"
+            )
+
+            # Load deploy paths from all the steps in the plan or flow
+            self._load_deploy_paths(plan_or_flow_name, is_plan)
+            if not self.deploy_paths:
+                self.logger.warning("No deploy paths found in the plan or flow.")
+                return
+            self.logger.debug(
+                f"Deploy paths found in the plan or flow: {self.deploy_paths}"
+            )
+
+        # Temp dir to copy all deploy paths from task options
+        temp_dir = tempfile.mkdtemp()
+        self.logger.info(f"Temporary deploy directory created: {temp_dir}")
+
+        for path in self.deploy_paths:
+            full_path = os.path.join(self.project_config.repo_root, path)
+            if not os.path.exists(full_path):
+                self.logger.info(f"Skipping path: '{path}' - path doesn't exist")
+                continue
+            self._copy_to_tempdir(path, temp_dir)
+
+        (
+            components,
+            api_retrieve_unpackaged_response,
+        ) = self._collect_components_from_paths(temp_dir)
+
+        # Remove the temp dir
+        shutil.rmtree(temp_dir)
+
+        if not components:
+            self.logger.info(f"No components found in deploy path {path}")
+            raise TaskOptionsError("No components found in the provided paths")
+
+        self.logger.debug("Components detected at source")
+        for component_type, component_names in components.items():
+            self.logger.debug(f"{component_type}: {', '.join(component_names)}")
+        # Check common components
+        existing_components = process_common_components(
+            api_retrieve_unpackaged_response, components
+        )
+
+        if existing_components:
+            self.logger.info("Components exist in the target org:")
+            for component_type, component_names in existing_components.items():
+                self.logger.info(f"{component_type}: {', '.join(component_names)}")
+            self.return_values["existing_components"] = existing_components
+        else:
+            self.logger.info(
+                "No components from the deploy paths exist in the target org."
+            )
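A hypothetical invocation sketch (the plan name and test wiring are assumptions, not part of the diff): because the findings land in `return_values`, a caller can branch on which components already exist in the target org.

    # Sketch: run against a named plan and inspect the result.
    task = create_task(CheckComponents, {"name": "install_prod"})  # plan name is hypothetical
    task()
    existing = task.return_values.get("existing_components", {})
    for component_type, names in existing.items():
        print(component_type, sorted(names))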
+
+    def _copy_to_tempdir(self, src_dir, temp_dir):
+        for item in os.listdir(src_dir):
+            src_item = os.path.join(src_dir, item)
+            dst_item = os.path.join(temp_dir, item)
+
+            if os.path.isdir(src_item):
+                if not os.path.exists(dst_item):
+                    shutil.copytree(src_item, dst_item)
+                else:
+                    self._merge_directories(src_item, dst_item)
+            else:
+                if not os.path.exists(dst_item):
+                    shutil.copy2(src_item, dst_item)
+                else:
+                    self.logger.debug(f"File {dst_item} already exists, skipping...")
+
+    def _merge_directories(self, src_dir, dst_dir):
+        for item in os.listdir(src_dir):
+            src_item = os.path.join(src_dir, item)
+            dst_item = os.path.join(dst_dir, item)
+
+            if os.path.isdir(src_item):
+                if not os.path.exists(dst_item):
+                    shutil.copytree(src_item, dst_item)
+                self._merge_directories(src_item, dst_item)
+            else:
+                if not os.path.exists(dst_item):
+                    shutil.copy2(src_item, dst_item)  # Copy file if it doesn't exist
+                else:
+                    self.logger.debug(f"File {dst_item} already exists, skipping...")
+
+    def _is_plan(self, name):
+        if self.project_config.lookup(f"plans__{name}") is not None:
+            return True
+        elif self.project_config.lookup(f"flows__{name}") is not None:
+            return False
+        else:
+            raise TaskOptionsError(
+                "No paths provided and unable to determine the current plan or flow name."
+            )
+
+    def _get_plan_tasks(self, name, is_plan=False):
+        tasks = []
+        if is_plan:
+            step_config = self.project_config.lookup(f"plans__{name}")
+        else:
+            step_config = self.project_config.lookup(f"flows__{name}")
+
+        tasks = self._freeze_steps(self.project_config, step_config)
+        return tasks
+
+    def _freeze_steps(self, project_config, plan_config) -> list:
+        steps = plan_config["steps"]
+        flow_config = FlowConfig(plan_config)
+        flow_config.project_config = project_config
+        flow = FlowCoordinator(project_config, flow_config)
+        steps = []
+        for step in flow.steps:
+            if step.skip:
+                continue
+            with cd(step.project_config.repo_root):
+                task = step.task_class(
+                    step.project_config,
+                    TaskConfig(step.task_config),
+                    name=step.task_name,
+                )
+                steps.extend(task.freeze(step))
+        self.logger.debug("Prepared steps:\n" + json.dumps(steps, indent=4))
+        return steps
+
+    def _load_deploy_paths(self, name, is_plan=False):
+        tasks = self._get_plan_tasks(name, is_plan)
+        if tasks:
+            self.deploy_paths = self._get_deployable_paths(tasks)
+
+    def _get_deployable_paths(self, tasks):
+        found_paths = []
+        paths_to_search = ["path", "subfolder"]
+        for task in tasks:
+            if "task_config" in task and "options" in task["task_config"]:
+                options = task["task_config"]["options"]
+                found_paths.extend(self._search_for_paths(options, paths_to_search))
+        return found_paths
+
+    def _search_for_paths(self, options, keys_to_search):
+        found_values = []
+
+        if not keys_to_search:
+            return found_values
+
+        def recursive_search(obj):
+            if isinstance(obj, dict):
+                for key, value in obj.items():
+                    if key in keys_to_search and isinstance(value, str):
+                        found_values.append(value)
+                    elif isinstance(value, (dict, list)):
+                        recursive_search(value)
+            elif isinstance(obj, list):
+                for item in obj:
+                    recursive_search(item)
+
+        recursive_search(options)
+        return found_values
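To make the recursion above concrete, here is the kind of options payload `_search_for_paths` is meant to walk (a sketch; the option shapes are illustrative only):

    options = {
        "path": "force-app",
        "dependencies": [{"subfolder": "unpackaged/pre/account_record_types"}],
        "ui_options": {"label": "Deploy"},
    }
    # _search_for_paths(options, ["path", "subfolder"]) visits every nested
    # dict and list and returns:
    #     ["force-app", "unpackaged/pre/account_record_types"]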
"package.xml")) + + with convert_sfdx_source(full_path, None, self.logger) as src_path: + package_xml_path = os.path.join(src_path, "package.xml") + if os.path.exists(package_xml_path): + try: + source_xml_tree = metadata_tree.parse(package_xml_path) + self.logger.info("parsing package.xml") + + components = metadata_tree.parse_package_xml_types( + "name", source_xml_tree + ) + + response_messages = self._get_api_object_responce( + package_xml_path, source_xml_tree.version.text + ) + + return [components, response_messages] + + except ParseError as e: + self.logger.error(f"Error parsing package.xml: {e}") + return None, None + else: + self.logger.warning( + f"No package.xml found in {full_path}, scanning directories" + ) + + return None, None + + def _get_api_object_responce(self, pakcage_xml_path, version): + + if not os.path.exists(pakcage_xml_path): + return None + + package_xml = open(pakcage_xml_path, "r") + + api_retrieve_unpackaged_object = self.api_retrieve_unpackaged( + self, package_xml.read(), version + ) + + response_messages = parseString( + api_retrieve_unpackaged_object._get_response().content + ).getElementsByTagName("messages") + + return response_messages diff --git a/cumulusci/tasks/salesforce/salesforce_files.py b/cumulusci/tasks/salesforce/salesforce_files.py new file mode 100644 index 0000000000..be39322ca7 --- /dev/null +++ b/cumulusci/tasks/salesforce/salesforce_files.py @@ -0,0 +1,244 @@ +import json +import os + +import requests + +from cumulusci.tasks.salesforce import BaseSalesforceApiTask + + +class ListFiles(BaseSalesforceApiTask): + task_docs = """ + Lists the available documents that have been uploaded to a library in Salesforce CRM Content or Salesforce Files + """ + + def _run_task(self): + self.return_values = [ + { + "Id": result["Id"], + "FileName": result["Title"], + "FileType": result["FileType"], + } + for result in self.sf.query( + "SELECT Title, Id, FileType FROM ContentDocument" + )["records"] + ] + self.logger.info(f"Found {len(self.return_values)} files") + if len(self.return_values) > 0: + self.logger.info(f"{'Id':<20} {'FileName':<50} {'FileType':<10}") + + # Print each row of the table + for file_desc in self.return_values: + self.logger.info( + f"{file_desc['Id']:<20} {file_desc['FileName']:<50} {file_desc['FileType']:<10}" + ) + + return self.return_values + + +class RetrieveFiles(BaseSalesforceApiTask): + task_docs = """ + This task downloads all the documents (files) that have been uploaded to a library in Salesforce CRM Content or Salesforce Files. + Use the task list_files in order to view the files that are available to download. + """ + + task_options = { + "path": { + "description": "The directory where the files will be saved. By default, files will be saved in Files", + "required": False, + }, + "file_list": { + "description": "Specify a comma-separated list of the names of the files along with file extension to download, enclosed in double quotation marks. All the availables files are downloaded by default. 
+
+
+class RetrieveFiles(BaseSalesforceApiTask):
+    task_docs = """
+    This task downloads all the documents (files) that have been uploaded to a library in Salesforce CRM Content or Salesforce Files.
+    Use the task list_files in order to view the files that are available to download.
+    """
+
+    task_options = {
+        "path": {
+            "description": "The directory where the files will be saved. By default, files will be saved in Files",
+            "required": False,
+        },
+        "file_list": {
+            "description": "Specify a comma-separated list of the names of the files along with file extension to download, enclosed in double quotation marks. All the available files are downloaded by default. Use the list_files task to view the files in the specified org.",
+            "required": False,
+        },
+    }
+
+    def _init_options(self, kwargs):
+        super(RetrieveFiles, self)._init_options(kwargs)
+
+        if "path" not in self.options:
+            self.options["path"] = "Files"
+
+        if "file_list" not in self.options:
+            self.options["file_list"] = ""
+
+        self.return_values = []
+
+    def _run_task(self):
+        self.logger.info("Retrieving files from the specified org")
+        path = self.options["path"]
+        self.logger.info(f"Output directory: {path}")
+
+        query_condition = ""
+        file_list = self.options["file_list"]
+
+        if file_list:
+            # If a list of file names to download is specified, fetch only those files.
+            items_list = [item.strip() for item in file_list.split(",")]
+            conditions = []
+            for item in items_list:
+                file_name, file_extension = os.path.splitext(item)
+                conditions.append(
+                    f"(Title = '{file_name}' AND FileType = '{file_extension[1:]}')"
+                )
+            query_condition = f"AND ({' OR '.join(conditions)})"
+
+        available_files = [
+            {
+                "Id": result["Id"],
+                "FileName": result["Title"],
+                "FileType": result["FileType"],
+                "VersionData": result["VersionData"],
+                "ContentDocumentId": result["ContentDocumentId"],
+            }
+            for result in self.sf.query(
+                f"SELECT Title, Id, FileType, VersionData, ContentDocumentId FROM ContentVersion WHERE isLatest=true {query_condition}"
+            )["records"]
+        ]
+
+        self.logger.info(f"Found {len(available_files)} files in the org.\n")
+        self.logger.info(
+            f'Files will be downloaded in the directory: {self.options["path"]} \n'
+        )
+
+        for current_file in available_files:
+            versionData = current_file["VersionData"]
+            url = f"{self.org_config.instance_url}/{versionData}"
+            headers = {"Authorization": f"Bearer {self.org_config.access_token}"}
+
+            response = requests.get(url, headers=headers, stream=True)
+            response.raise_for_status()
+
+            file_extension = current_file["FileType"].lower()
+            local_filename = f"{current_file['FileName']}.{file_extension}"
+            local_filename = os.path.join(path, local_filename)
+
+            self.logger.info(f"Downloading: {current_file['FileName']}")
+
+            if os.path.exists(local_filename):
+                file_name = current_file["FileName"]
+                self.logger.info(
+                    f"A file with the name {file_name} already exists in the directory. This file will be renamed."
+                )
+                count = 1
+                while True:
+                    local_filename = os.path.join(
+                        path,
+                        f"{current_file['FileName']} ({count}).{file_extension}",
+                    )
+                    if not os.path.exists(local_filename):
+                        break
+                    count += 1
+
+            os.makedirs(
+                os.path.dirname(local_filename), exist_ok=True
+            )  # Create the folder if it doesn't exist
+
+            with open(local_filename, "wb") as f:
+                for chunk in response.iter_content(chunk_size=8192):
+                    if chunk:
+                        f.write(chunk)
+
+            self.logger.info("\n")
+
+        self.return_values = available_files
+        return self.return_values
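A matching sketch for RetrieveFiles, mirroring the documented options (the file names are illustrative):

    task = create_task(
        RetrieveFiles, {"path": "Files", "file_list": "Quote.pdf, Logo.png"}
    )
    task()  # downloads the latest ContentVersion of each listed file into ./Files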
+class UploadFiles(BaseSalesforceApiTask): + task_docs = """ + This task uploads files to a Salesforce org. + """ + task_options = { + "path": { + "description": "The directory to upload files from. By default, files under the 'Files' folder are uploaded.", + "required": False, + }, + "file_list": { + "description": "Specify a comma-separated list of files to upload. All the files in the specified directory are uploaded by default.", + "required": False, + }, + } + + def _init_options(self, kwargs): + super(UploadFiles, self)._init_options(kwargs) + + if "path" not in self.options: + self.options["path"] = "Files" + + if "file_list" not in self.options: + self.options["file_list"] = "" + + self.return_values = [] + + def _run_task(self): + path = self.options["path"] + file_list = self.options["file_list"] + + # Salesforce REST API endpoint for uploading files + api_version = self.project_config.project__package__api_version + url = f"{self.org_config.instance_url}/services/data/v{api_version}/sobjects/ContentVersion/" + + # Prepare the request headers + headers = { + "Authorization": f"Bearer {self.org_config.access_token}", + } + + if file_list: + files_to_upload = file_list.split(",") + else: + files_to_upload = os.listdir(path) + + for filename in files_to_upload: + file_path = os.path.join(path, filename.strip()) + + if os.path.isfile(file_path): + with open(file_path, "rb") as file: + # Construct the payload for the entity content + title = os.path.splitext(os.path.basename(file_path))[ + 0 + ] # File name + + entity_content = { + "Title": title, + "PathOnClient": file_path, + } + + self.return_values.append(entity_content) + + files = { + "entity_content": ( + "", + json.dumps(entity_content), + "application/json", + ), + "VersionData": (filename, file, "application/octet-stream"), + } + + try: + response = requests.post(url, headers=headers, files=files) + response.raise_for_status() # Raise an exception for HTTP errors + + # Parse the response JSON + response_json = response.json() + + if response.status_code == 201: # Upload successful + content_version_id = response_json["id"] + self.logger.info( + f"File '{filename}' uploaded successfully. ContentVersion Id: {content_version_id}" + ) + else: + self.logger.error( + f"Failed to upload file '{filename}': {response_json}" + ) + except requests.RequestException as e: + self.logger.error(f"Error uploading file '{filename}': {e}") + if e.response is not None: # Log the response body when one exists + self.logger.error(e.response.content) + + return self.return_values # Returns a list containing all the files uploaded. diff --git a/cumulusci/tasks/salesforce/sourcetracking.py b/cumulusci/tasks/salesforce/sourcetracking.py index a7c2d02d6f..27567ddb20 100644 --- a/cumulusci/tasks/salesforce/sourcetracking.py +++ b/cumulusci/tasks/salesforce/sourcetracking.py @@ -150,7 +150,7 @@ def _reset_sfdx_snapshot(self): self.org_config, ScratchOrgConfig ): sfdx( - "force:source:tracking:reset", + "project reset tracking", args=["-p"], username=self.org_config.username, capture_output=True, @@ -225,10 +225,11 @@ def retrieve_components( api_version: str, project_config: BaseProjectConfig = None, retrieve_complete_profile: bool = False, + capture_output: bool = False, ): """Retrieve specified components from an org into a target folder. - Retrieval is done using the sfdx force:source:retrieve command. + Retrieval is done using the sf project retrieve start command. Set `md_format` to True if retrieving into a folder with a package in metadata format.
In this case the folder will be temporarily @@ -239,7 +240,6 @@ def retrieve_components( target = os.path.realpath(target) profiles = [] - # If retrieve_complete_profile and project_config is None, raise error # This is because project_config is only required if retrieve_complete_profile is True if retrieve_complete_profile and project_config is None: @@ -273,7 +273,7 @@ def retrieve_components( {"packageDirectories": [{"path": "force-app", "default": True}]}, f ) sfdx( - "force:mdapi:convert", + "project convert mdapi", log_note="Converting to DX format", args=["-r", target, "-d", "force-app"], check_return=True, @@ -290,8 +290,8 @@ def retrieve_components( _write_manifest(components, package_xml_path, api_version) # Retrieve specified components in DX format - sfdx( - "force:source:retrieve", + p = sfdx( + "project retrieve start", access_token=org_config.access_token, log_note="Retrieving components", args=[ @@ -301,10 +301,11 @@ def retrieve_components( os.path.join(package_xml_path, "package.xml"), "-w", "5", + "--ignore-conflicts", ], - capture_output=False, + capture_output=capture_output, check_return=True, - env={"SFDX_INSTANCE_URL": org_config.instance_url}, + env={"SF_ORG_INSTANCE_URL": org_config.instance_url}, ) # Extract Profiles @@ -320,14 +321,13 @@ def retrieve_components( task_config=task_config, ) cls_retrieve_profile() - if md_format: # Convert back to metadata format sfdx( - "force:source:convert", + "project convert source", log_note="Converting back to metadata format", args=["-r", "force-app", "-d", target], - capture_output=False, + capture_output=capture_output, check_return=True, ) @@ -348,6 +348,10 @@ def retrieve_components( package_xml = PackageXmlGenerator(**package_xml_opts)() with open(os.path.join(target, "package.xml"), "w", encoding="utf-8") as f: f.write(package_xml) + if capture_output: + return p.stdout_text.read() + else: + return None class RetrieveChanges(ListChanges, BaseSalesforceApiTask): diff --git a/cumulusci/tasks/salesforce/tests/test_EnsureRecordTypes.py b/cumulusci/tasks/salesforce/tests/test_EnsureRecordTypes.py index ee165b5640..40d0e3e1b1 100644 --- a/cumulusci/tasks/salesforce/tests/test_EnsureRecordTypes.py +++ b/cumulusci/tasks/salesforce/tests/test_EnsureRecordTypes.py @@ -15,7 +15,7 @@ NPSP_Default true - Test + Identify &amp; Qualify false @@ -79,7 +79,7 @@ "name": "StageName", "picklistValues": [ {"value": "Bad", "active": False}, - {"value": "Test", "active": True}, + {"value": "Identify & Qualify", "active": True}, ], }, ], @@ -134,7 +134,7 @@ def test_infers_correct_business_process(self): assert task.options["generate_business_process"] assert task.options["generate_record_type"] - assert task.options["stage_name"] == "Test" + assert task.options["stage_name"] == "Identify & Qualify" def test_no_business_process_where_unneeded(self): task = create_task( @@ -178,7 +178,6 @@ def test_generates_record_type_and_business_process(self): with open(os.path.join("objects", "Opportunity.object"), "r") as f: opp_contents = f.read() assert OPPORTUNITY_METADATA == opp_contents - assert OPPORTUNITY_METADATA == opp_contents with open(os.path.join("package.xml"), "r") as f: pkg_contents = f.read() assert PACKAGE_XML == pkg_contents @@ -203,8 +202,8 @@ def test_generates_record_type_and_business_process__case(self): with temporary_dir(): task._build_package() with open(os.path.join("objects", "Case.object"), "r") as f: - opp_contents = f.read() - assert CASE_METADATA == opp_contents + case_contents = f.read() + assert CASE_METADATA ==
case_contents with open(os.path.join("package.xml"), "r") as f: pkg_contents = f.read() assert PACKAGE_XML == pkg_contents diff --git a/cumulusci/tasks/salesforce/tests/test_check_components.py b/cumulusci/tasks/salesforce/tests/test_check_components.py new file mode 100644 index 0000000000..549275be99 --- /dev/null +++ b/cumulusci/tasks/salesforce/tests/test_check_components.py @@ -0,0 +1,241 @@ +from unittest.mock import ANY, MagicMock, mock_open, patch + +import pytest + +from cumulusci.core.config import TaskConfig +from cumulusci.core.exceptions import TaskOptionsError +from cumulusci.tasks.salesforce.check_components import CheckComponents +from cumulusci.tests.util import create_project_config + +from .util import create_task + + +class TestCheckComponents: + @patch("os.path.exists", return_value=True) + @patch("os.remove") + @patch("os.path.isdir", return_value=True) + @patch("os.listdir", return_value=["some_file_or_directory"]) + @patch("os.path.join", side_effect=lambda *args: "/".join(args)) + @patch("cumulusci.core.sfdx.convert_sfdx_source") + @patch( + "builtins.open", + new_callable=mock_open, + read_data=""" + + + Delivery + ApexClass + + + Delivery__c + CustomObject + + 58.0 + + """, + ) + @patch("cumulusci.utils.xml.metadata_tree.parse") + def test_collect_components_from_paths( + self, + mock_metadata_parse, + mock_open_file, + mock_convert_sfdx_source, + mock_path_join, + mock_listdir, + mock_isdir, + mock_remove, + mock_path_exists, + ): + mock_path_exists.return_value = True + mock_isdir.return_value = True + mock_listdir.return_value = ["some_file_or_directory"] + mock_path_join.side_effect = lambda *args: "/".join(args) + mock_convert_sfdx_source.return_value.__enter__.return_value = "/converted/path" + + mock_tree = MagicMock() + mock_tree.findall.return_value = [ + MagicMock( + findall=lambda tag: [MagicMock(text="Delivery")] + if tag == "members" + else [], + find=lambda tag: MagicMock(text="ApexClass") if tag == "name" else None, + ), + MagicMock( + findall=lambda tag: [MagicMock(text="Delivery__c")] + if tag == "members" + else [], + find=lambda tag: MagicMock(text="CustomObject") + if tag == "name" + else None, + ), + ] + mock_tree.find.return_value = MagicMock(text="58.0") + mock_metadata_parse.return_value = mock_tree + + response_messages = [ + MagicMock( + getElementsByTagName=MagicMock( + return_value=[ + MagicMock( + firstChild=MagicMock( + nodeValue="Entity of type 'ApexClass' named 'CustomHealth' cannot be found" + ) + ) + ] + ) + ) + ] + + with patch("cumulusci.core.sfdx.sfdx") as sfdx: + with patch.object( + CheckComponents, "_get_api_object_response" + ) as mock_get_api_response: + mock_get_api_response.return_value = response_messages + task = create_task(CheckComponents, {"paths": "force-app/main/default"}) + components, api_response = task._collect_components_from_paths( + "force-app/main/default" + ) + + assert components is not None + assert "ApexClass" not in components + sfdx.assert_called_once_with( + "project convert source", + args=["-d", ANY, "-r", "force-app/main/default"], + capture_output=True, + check_return=True, + ) + + @patch("os.path.exists", return_value=False) + def test_collect_components_from_nonexistent_paths(self, mock_path_exists): + task = create_task(CheckComponents, {"paths": "invalid/path"}) + components, api_response = task._collect_components_from_paths("invalid/path") + assert components is None + assert api_response is None + + @patch("os.path.exists", return_value=False) + def
test_copy_to_tempdir_nonexistent_src(self, mock_exists): + task = create_task(CheckComponents, {"paths": "force-app/main/default"}) + with pytest.raises(FileNotFoundError): + task._copy_to_tempdir("nonexistent_src", "temp_dir") + + @patch("shutil.copy2") + @patch("os.listdir", return_value=["file1", "file2"]) + @patch("os.path.isdir", return_value=False) + @patch("os.path.exists", return_value=False) + def test_copy_to_tempdir(self, mock_exists, mock_isdir, mock_listdir, mock_copy2): + task = create_task(CheckComponents, {"paths": "force-app/main/default"}) + task._copy_to_tempdir("force-app/main/default", "temp_dir") + mock_copy2.assert_called() + + def test_get_deployable_paths(self): + task = create_task(CheckComponents, {"paths": "force-app/main/default"}) + tasks = [ + { + "task_config": { + "options": { + "path": "unpackaged/pre", + "subfolder": "/unpackaged/post/test", + } + } + }, + {"task_config": {"options": {"path": "force-app"}}}, + ] + paths = task._get_deployable_paths(tasks) + assert paths == ["unpackaged/pre", "/unpackaged/post/test", "force-app"] + + def test_is_plan_valid(self): + task = create_task(CheckComponents, {"paths": "force-app/main/default"}) + with patch.object(task.project_config, "lookup") as mock_lookup: + mock_lookup.side_effect = lambda name: { + "plans__test_plan": True, + "flows__test_flow": False, + }.get(name, None) + assert task._is_plan("test_plan") is True + assert task._is_plan("test_flow") is False + + def test_is_plan_invalid(self): + task = create_task(CheckComponents, {"name": "invalid_name"}) + with patch.object(task.project_config, "lookup", return_value=None): + with pytest.raises( + TaskOptionsError, + match="No paths provided and unable to determine the current plan or flow name.", + ): + task._is_plan("invalid_name") + + def test_init_options_with_both_paths_and_name(self): + with pytest.raises( + TaskOptionsError, match="Please provide either --paths or --name" + ): + create_task(CheckComponents, {"paths": "some/path", "name": "some_plan"}) + + def test_init_options_with_neither_paths_nor_name(self): + with pytest.raises( + TaskOptionsError, + match="This task requires a plan/flow name or paths options. 
Pass --paths or --name options", + ): + create_task(CheckComponents, {}) + + def test_load_deploy_paths(self): + task = create_task(CheckComponents, {"name": "some_name"}) + with patch.object( + task, + "_get_plan_tasks", + return_value=[{"task_config": {"options": {"path": "unpackaged/pre"}}}], + ): + with patch.object( + task, "_get_deployable_paths", return_value=["unpackaged/pre"] + ): + task._load_deploy_paths("some_name", is_plan=True) + assert task.deploy_paths == ["unpackaged/pre"] + + def test_freeze_steps__skip(self): + project_config = create_project_config() + plan_config = { + "title": "Test Install", + "slug": "install", + "tier": "primary", + "steps": {1: {"task": "None"}}, + } + project_config.config["plans"] = { + "Test Install": plan_config, + } + task_config = TaskConfig({"options": {"name": "Test Install"}}) + task = CheckComponents(project_config, task_config) + steps = task._freeze_steps(project_config, plan_config) + assert steps == [] + + def test_freeze_steps_nested(self): + project_config = create_project_config() + flow_config = { + "test": { + "steps": { + 1: { + "flow": "test2", + } + } + }, + "test2": { + "steps": { + 1: { + "task": "deploy", + "options": {"path": "force-app/main/default"}, + } + } + }, + } + plan_config = { + "title": "Test Install", + "slug": "install", + "tier": "primary", + "steps": {1: {"flow": "test"}}, + } + project_config.config["plans"] = { + "Test Install": plan_config, + } + project_config.config["flows"] = flow_config + + task_config = TaskConfig({"options": {"name": "Test Install"}}) + task = CheckComponents(project_config, task_config) + steps = task._freeze_steps(project_config, plan_config) + assert steps[0]["name"] == "deploy" + assert len(steps) == 1 diff --git a/cumulusci/tasks/salesforce/tests/test_enable_prediction.py b/cumulusci/tasks/salesforce/tests/test_enable_prediction.py index 194c0e4a19..101860177c 100644 --- a/cumulusci/tasks/salesforce/tests/test_enable_prediction.py +++ b/cumulusci/tasks/salesforce/tests/test_enable_prediction.py @@ -1,5 +1,6 @@ import pytest import responses +from responses.matchers import json_params_matcher from cumulusci.core.config.org_config import OrgConfig from cumulusci.core.exceptions import CumulusCIException @@ -89,12 +90,12 @@ def test_run_task(mock_oauth, task): mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/001", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/002", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) task() @@ -164,12 +165,12 @@ def test_run_task__namespaced_org(mock_oauth, task): mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/001", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/002", - match=[responses.json_params_matcher({"Metadata": {"status": 
"Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) mock_oauth.add( @@ -222,12 +223,12 @@ def test_run_task__managed_org(mock_oauth, task): mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/001", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/002", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) task() diff --git a/cumulusci/tasks/salesforce/tests/test_nonsourcetracking.py b/cumulusci/tasks/salesforce/tests/test_nonsourcetracking.py index 2dc1c1142e..066c371943 100644 --- a/cumulusci/tasks/salesforce/tests/test_nonsourcetracking.py +++ b/cumulusci/tasks/salesforce/tests/test_nonsourcetracking.py @@ -215,9 +215,9 @@ def test_run_task(self, sfdx, create_task_fixture): assert "SharingRules: alpha" in messages assert "SharingRules: BusinessBrand" not in messages assert sfdx_calls == [ - "force:mdapi:convert", - "force:source:retrieve", - "force:source:convert", + "project convert mdapi", + "project retrieve start", + "project convert source", ] assert os.path.exists(os.path.join("src", "package.xml")) diff --git a/cumulusci/tasks/salesforce/tests/test_salesforce_files.py b/cumulusci/tasks/salesforce/tests/test_salesforce_files.py new file mode 100644 index 0000000000..e236552fa0 --- /dev/null +++ b/cumulusci/tasks/salesforce/tests/test_salesforce_files.py @@ -0,0 +1,228 @@ +import json +import os +import unittest +from unittest.mock import Mock, call, mock_open, patch + +from cumulusci.tasks.salesforce.salesforce_files import ( + ListFiles, + RetrieveFiles, + UploadFiles, +) +from cumulusci.tasks.salesforce.tests.util import create_task + + +class TestDisplayFiles: + def test_display_files(self): + task = create_task(ListFiles, {}) + task._init_api = Mock() + task._init_api.return_value.query.return_value = { + "totalSize": 2, + "records": [ + {"Title": "TEST1", "Id": "0PS000000000000", "FileType": "TXT"}, + {"Title": "TEST2", "Id": "0PS000000000001", "FileType": "TXT"}, + ], + } + task() + + task._init_api.return_value.query.assert_called_once_with( + "SELECT Title, Id, FileType FROM ContentDocument" + ) + assert task.return_values == [ + {"Id": "0PS000000000000", "FileName": "TEST1", "FileType": "TXT"}, + {"Id": "0PS000000000001", "FileName": "TEST2", "FileType": "TXT"}, + ] + + +class TestRetrieveFiles(unittest.TestCase): + @patch("requests.get") + @patch("os.path.exists") + @patch("os.makedirs") + @patch("builtins.open") + def test_run_task(self, mock_open, mock_makedirs, mock_exists, mock_get): + # Mock Salesforce query response + mock_sf = Mock() + mock_sf.query.return_value = { + "totalSize": 2, + "records": [ + { + "Title": "TEST1", + "Id": "0PS000000000000", + "FileType": "TXT", + "VersionData": "version1", + "ContentDocumentId": "doc1", + }, + { + "Title": "TEST2", + "Id": "0PS000000000001", + "FileType": "TXT", + "VersionData": "version2", + "ContentDocumentId": "doc2", + }, + ], + } + + # Mock org config + mock_org_config = Mock() + mock_org_config.instance_url = "https://test.salesforce.com" + mock_org_config.access_token = "test token" + + # Create task with mocked Salesforce and org config + 
task = create_task(RetrieveFiles, {"path": "test_dir", "file_list": ""}) + task.sf = mock_sf + task.org_config = mock_org_config + + # Mock file existence and request response + mock_exists.return_value = False + mock_response = Mock() + mock_response.iter_content.return_value = [b"chunk1", b"chunk2"] + mock_response.raise_for_status = Mock() + mock_get.return_value = mock_response + + # Run the task + task._run_task() + + # Check if query was called with correct SOQL + mock_sf.query.assert_called_once_with( + "SELECT Title, Id, FileType, VersionData, ContentDocumentId FROM ContentVersion WHERE isLatest=true " + ) + + # Check if files are downloaded + expected_calls = [ + call( + "https://test.salesforce.com/version1", + headers={"Authorization": "Bearer test token"}, + stream=True, + ), + call( + "https://test.salesforce.com/version2", + headers={"Authorization": "Bearer test token"}, + stream=True, + ), + ] + mock_get.assert_has_calls(expected_calls, any_order=True) + + # Check if files are written correctly + mock_open.assert_any_call(os.path.join("test_dir", "TEST1.txt"), "wb") + mock_open.assert_any_call(os.path.join("test_dir", "TEST2.txt"), "wb") + + # Check if return values are set correctly + self.assertEqual( + task.return_values, + [ + { + "Id": "0PS000000000000", + "FileName": "TEST1", + "FileType": "TXT", + "VersionData": "version1", + "ContentDocumentId": "doc1", + }, + { + "Id": "0PS000000000001", + "FileName": "TEST2", + "FileType": "TXT", + "VersionData": "version2", + "ContentDocumentId": "doc2", + }, + ], + ) + + +class TestUploadFiles(unittest.TestCase): + @patch("requests.post") + @patch("os.listdir") + @patch("os.path.isfile") + @patch("builtins.open", new_callable=mock_open, read_data=b"test data") + def test_run_task(self, mock_open, mock_isfile, mock_listdir, mock_post): + # Mock org config and project config + mock_org_config = Mock() + mock_org_config.instance_url = "https://test.salesforce.com" + mock_org_config.access_token = "test token" + + mock_project_config = Mock() + mock_project_config.project__package__api_version = "50.0" + + # Create task with mocked configs + task = create_task(UploadFiles, {"path": "test_dir", "file_list": ""}) + task.org_config = mock_org_config + task.project_config = mock_project_config + + # Mock file discovery + mock_listdir.return_value = ["file1.txt", "file2.txt"] + mock_isfile.side_effect = lambda filepath: filepath in [ + os.path.join("test_dir", "file1.txt"), + os.path.join("test_dir", "file2.txt"), + ] + + # Mock requests response + mock_response = Mock() + mock_response.status_code = 201 + mock_response.json.return_value = {"id": "contentversionid"} + mock_post.return_value = mock_response + + # Run the task + task._run_task() + + mock_open.assert_any_call(os.path.join("test_dir", "file1.txt"), "rb") + mock_open.assert_any_call(os.path.join("test_dir", "file2.txt"), "rb") + + # Check if requests.post was called correctly + expected_calls = [ + call( + "https://test.salesforce.com/services/data/v50.0/sobjects/ContentVersion/", + headers={"Authorization": "Bearer test token"}, + files={ + "entity_content": ( + "", + json.dumps( + { + "Title": "file1", + "PathOnClient": os.path.join("test_dir", "file1.txt"), + } + ), + "application/json", + ), + "VersionData": ( + "file1.txt", + mock_open(), + "application/octet-stream", + ), + }, + ), + call( + "https://test.salesforce.com/services/data/v50.0/sobjects/ContentVersion/", + headers={"Authorization": "Bearer test token"}, + files={ + "entity_content": ( + "", + json.dumps( + { 
+ "Title": "file2", + "PathOnClient": os.path.join("test_dir", "file2.txt"), + } + ), + "application/json", + ), + "VersionData": ( + "file2.txt", + mock_open(), + "application/octet-stream", + ), + }, + ), + ] + + self.assertEqual( + task.return_values, + [ + { + "Title": "file1", + "PathOnClient": os.path.join("test_dir", "file1.txt"), + }, + { + "Title": "file2", + "PathOnClient": os.path.join("test_dir", "file2.txt"), + }, + ], + ) + + mock_post.assert_has_calls(expected_calls, any_order=True) diff --git a/cumulusci/tasks/salesforce/tests/test_sourcetracking.py b/cumulusci/tasks/salesforce/tests/test_sourcetracking.py index 97583af20c..258d1a1e32 100644 --- a/cumulusci/tasks/salesforce/tests/test_sourcetracking.py +++ b/cumulusci/tasks/salesforce/tests/test_sourcetracking.py @@ -188,11 +188,10 @@ def test_run_task(self, sfdx, create_task_fixture): pathlib.Path, "is_dir", return_value=True ): task._run_task() - assert sfdx_calls == [ - "force:mdapi:convert", - "force:source:retrieve", - "force:source:convert", + "project convert mdapi", + "project retrieve start", + "project convert source", ] assert os.path.exists(os.path.join("src", "package.xml")) mock_retrieve_profile.assert_called() diff --git a/cumulusci/tasks/salesforce/users/permsets.py b/cumulusci/tasks/salesforce/users/permsets.py index 530f9bd4fe..8df9fa0ba6 100644 --- a/cumulusci/tasks/salesforce/users/permsets.py +++ b/cumulusci/tasks/salesforce/users/permsets.py @@ -158,7 +158,7 @@ def _process_composite_results(self, api_results): class AssignPermissionSetLicenses(AssignPermissionSets): task_docs = """ -Assigns Permission Set Licenses whose Developer Names are in ``api_names`` to either the default org user or the user whose Alias is ``user_alias``. This task skips assigning Permission Set Licenses that are already assigned. +Assigns Permission Set Licenses whose Developer Names or PermissionSetLicenseKey are in ``api_names`` to either the default org user or the user whose Alias is ``user_alias``. This task skips assigning Permission Set Licenses that are already assigned. Permission Set Licenses are usually associated with a Permission Set, and assigning the Permission Set usually assigns the associated Permission Set License automatically. However, in non-namespaced developer scratch orgs, assigning the associated Permission Set may not automatically assign the Permission Set License, and this task will ensure the Permission Set Licenses are assigned. 
""" @@ -174,12 +174,35 @@ class AssignPermissionSetLicenses(AssignPermissionSets): } permission_name = "PermissionSetLicense" - permission_name_field = "DeveloperName" + permission_name_field = ["DeveloperName", "PermissionSetLicenseKey"] permission_label = "Permission Set License" assignment_name = "PermissionSetLicenseAssign" assignment_lookup = "PermissionSetLicenseId" assignment_child_relationship = "PermissionSetLicenseAssignments" + def _get_perm_ids(self): + perms_by_ids = {} + api_names = "', '".join(self.options["api_names"]) + perms = self.sf.query( + f"SELECT Id,{self.permission_name_field[0]},{self.permission_name_field[1]} FROM {self.permission_name} WHERE {self.permission_name_field[0]} IN ('{api_names}') OR {self.permission_name_field[1]} IN ('{api_names}')" + ) + for p in perms["records"]: + if p[self.permission_name_field[0]] in self.options["api_names"]: + perms_by_ids[p["Id"]] = p[self.permission_name_field[0]] + else: + perms_by_ids[p["Id"]] = p[self.permission_name_field[1]] + + missing_perms = [ + api_name + for api_name in self.options["api_names"] + if api_name not in perms_by_ids.values() + ] + if missing_perms: + raise CumulusCIException( + f"The following {self.permission_label}s were not found: {', '.join(missing_perms)}." + ) + return perms_by_ids + class AssignPermissionSetGroups(AssignPermissionSets): task_docs = """ diff --git a/cumulusci/tasks/salesforce/users/tests/test_permsets.py b/cumulusci/tasks/salesforce/users/tests/test_permsets.py index 901a49952c..96bdf2de70 100644 --- a/cumulusci/tasks/salesforce/users/tests/test_permsets.py +++ b/cumulusci/tasks/salesforce/users/tests/test_permsets.py @@ -3,6 +3,7 @@ import pytest import responses +from responses.matchers import json_params_matcher from cumulusci.core.exceptions import CumulusCIException from cumulusci.tasks.salesforce.tests.util import create_task @@ -68,7 +69,7 @@ def test_create_permset(self): status=200, json=[{"id": "0Pa000000000001", "success": True, "errors": []}], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -152,7 +153,7 @@ def test_create_permset__alias(self): {"id": "0Pa000000000001", "success": True, "errors": []}, ], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -383,7 +384,7 @@ def test_create_permset_partial_success_raises(self, table): }, ], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -447,19 +448,21 @@ def test_create_permsetlicense(self): ) responses.add( method="GET", - url=f"{task.org_config.instance_url}/services/data/v{CURRENT_SF_API_VERSION}/query/?q=SELECT+Id%2CDeveloperName+FROM+PermissionSetLicense+WHERE+DeveloperName+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%29", + url=f"{task.org_config.instance_url}/services/data/v{CURRENT_SF_API_VERSION}/query/?q=SELECT+Id%2CDeveloperName%2CPermissionSetLicenseKey+FROM+PermissionSetLicense+WHERE+DeveloperName+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%29+OR+PermissionSetLicenseKey+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%29", status=200, json={ "done": True, - "totalSize": 1, + "totalSize": 2, "records": [ { "Id": "0PL000000000000", "DeveloperName": "PermSetLicense1", + "PermissionSetLicenseKey": "PermSetLicense1", }, { "Id": "0PL000000000001", "DeveloperName": "PermSetLicense2", + "PermissionSetLicenseKey": "PermSetLicense1", }, ], }, @@ -470,7 +473,7 @@ def test_create_permsetlicense(self): status=200, json=[{"id": 
"0Pa000000000001", "success": True, "errors": []}], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -516,19 +519,21 @@ def test_create_permsetlicense__no_assignments(self): ) responses.add( method="GET", - url=f"{task.org_config.instance_url}/services/data/v{CURRENT_SF_API_VERSION}/query/?q=SELECT+Id%2CDeveloperName+FROM+PermissionSetLicense+WHERE+DeveloperName+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%29", + url=f"{task.org_config.instance_url}/services/data/v{CURRENT_SF_API_VERSION}/query/?q=SELECT+Id%2CDeveloperName%2CPermissionSetLicenseKey+FROM+PermissionSetLicense+WHERE+DeveloperName+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%29+OR+PermissionSetLicenseKey+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%29", status=200, json={ "done": True, - "totalSize": 1, + "totalSize": 2, "records": [ { "Id": "0PL000000000000", "DeveloperName": "PermSetLicense1", + "PermissionSetLicenseKey": "PermSet.License1", }, { "Id": "0PL000000000001", "DeveloperName": "PermSetLicense2", + "PermissionSetLicenseKey": "PermSet.License2", }, ], }, @@ -543,7 +548,7 @@ def test_create_permsetlicense__no_assignments(self): {"id": "0Pa000000000001", "success": True, "errors": []}, ], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -597,23 +602,26 @@ def test_create_permsetlicense__alias(self): ) responses.add( method="GET", - url=f"{task.org_config.instance_url}/services/data/v{CURRENT_SF_API_VERSION}/query/?q=SELECT+Id%2CDeveloperName+FROM+PermissionSetLicense+WHERE+DeveloperName+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%29", + url=f"{task.org_config.instance_url}/services/data/v{CURRENT_SF_API_VERSION}/query/?q=SELECT+Id%2CDeveloperName%2CPermissionSetLicenseKey+FROM+PermissionSetLicense+WHERE+DeveloperName+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%29+OR+PermissionSetLicenseKey+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%29", status=200, json={ "done": True, - "totalSize": 1, + "totalSize": 2, "records": [ { "Id": "0PL000000000000", "DeveloperName": "PermSetLicense1", + "PermissionSetLicenseKey": "PermSetLicense1", }, { "Id": "0PL000000000001", "DeveloperName": "PermSetLicense2", + "PermissionSetLicenseKey": "PermSetLicense2", }, ], }, ) + responses.add( method="POST", url=f"{task.org_config.instance_url}/services/data/v{CURRENT_SF_API_VERSION}/sobjects/PermissionSetLicenseAssign/", @@ -627,7 +635,7 @@ def test_create_permsetlicense__alias(self): status=200, json=[{"id": "0Pa000000000001", "success": True, "errors": []}], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -698,7 +706,7 @@ def test_create_permsetlicense_raises(self): ) responses.add( method="GET", - url=f"{task.org_config.instance_url}/services/data/v{CURRENT_SF_API_VERSION}/query/?q=SELECT+Id%2CDeveloperName+FROM+PermissionSetLicense+WHERE+DeveloperName+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%2C+%27PermSetLicense3%27%29", + url=f"{task.org_config.instance_url}/services/data/v{CURRENT_SF_API_VERSION}/query/?q=SELECT+Id%2CDeveloperName%2CPermissionSetLicenseKey+FROM+PermissionSetLicense+WHERE+DeveloperName+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%2C+%27PermSetLicense3%27%29+OR+PermissionSetLicenseKey+IN+%28%27PermSetLicense1%27%2C+%27PermSetLicense2%27%2C+%27PermSetLicense3%27%29", status=200, json={ "done": True, @@ -707,15 +715,16 @@ def test_create_permsetlicense_raises(self): { "Id": "0PL000000000000", 
"DeveloperName": "PermSetLicense1", + "PermissionSetLicenseKey": "PermSetLicense1", }, { "Id": "0PL000000000001", "DeveloperName": "PermSetLicense2", + "PermissionSetLicenseKey": "PermSetLicense2", }, ], }, ) - with pytest.raises(CumulusCIException): task() @@ -774,7 +783,7 @@ def test_create_permsetgroup(self): status=200, json=[{"id": "0Pa000000000001", "success": True, "errors": []}], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -847,7 +856,7 @@ def test_create_permsetgroup__alias(self): status=200, json=[{"id": "0Pa000000000001", "success": True, "errors": []}], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ diff --git a/cumulusci/tasks/sfdx.py b/cumulusci/tasks/sfdx.py index 612c1376f8..717fc26570 100644 --- a/cumulusci/tasks/sfdx.py +++ b/cumulusci/tasks/sfdx.py @@ -17,7 +17,7 @@ from cumulusci.core.tasks import BaseSalesforceTask from cumulusci.tasks.command import Command -SFDX_CLI = "sfdx" +SFDX_CLI = "sf" class SFDXBaseTask(Command): @@ -47,20 +47,20 @@ def _get_command(self): command = super()._get_command() # For scratch orgs, just pass the username in the command line if isinstance(self.org_config, ScratchOrgConfig): - command += " -u {username}".format(username=self.org_config.username) + command += " -o {username}".format(username=self.org_config.username) return command def _get_env(self): env = super(SFDXOrgTask, self)._get_env() if not isinstance(self.org_config, ScratchOrgConfig): # For non-scratch keychain orgs, pass the access token via env var - env["SFDX_INSTANCE_URL"] = self.org_config.instance_url - env["SFDX_DEFAULTUSERNAME"] = self.org_config.access_token + env["SF_ORG_INSTANCE_URL"] = self.org_config.instance_url + env["SF_TARGET_ORG"] = self.org_config.access_token return env class SFDXJsonTask(SFDXOrgTask): - command = "force:mdapi:deploy --json" + command = "project deploy start --json" task_options = { "extra": {"description": "Append additional options to the command"} diff --git a/cumulusci/tasks/tests/test_command.py b/cumulusci/tasks/tests/test_command.py index bea9df4fb6..128353f08c 100644 --- a/cumulusci/tasks/tests/test_command.py +++ b/cumulusci/tasks/tests/test_command.py @@ -126,4 +126,4 @@ def test_get_env(self): task = SalesforceCommand(self.project_config, self.task_config, self.org_config) env = task._get_env() assert "SF_ACCESS_TOKEN" in env - assert "SF_INSTANCE_URL" in env + assert "SF_ORG_INSTANCE_URL" in env diff --git a/cumulusci/tasks/tests/test_connectedapp.py b/cumulusci/tasks/tests/test_connectedapp.py index 3f4c4666b8..81e6f765eb 100644 --- a/cumulusci/tasks/tests/test_connectedapp.py +++ b/cumulusci/tasks/tests/test_connectedapp.py @@ -97,7 +97,10 @@ def test_get_command(self, run_command_mock): ) task.tempdir = "asdf" command = task._get_command() - assert command == "sfdx force:mdapi:deploy --wait 5 -u username -d asdf" + assert ( + command + == "sf project deploy start --wait 5 -o username --metadata-dir asdf" + ) def test_process_json_output(self): """_process_json_output returns valid json""" diff --git a/cumulusci/tasks/tests/test_create_package_version.py b/cumulusci/tasks/tests/test_create_package_version.py index 6d096012b5..e972529db8 100644 --- a/cumulusci/tasks/tests/test_create_package_version.py +++ b/cumulusci/tasks/tests/test_create_package_version.py @@ -472,7 +472,7 @@ def test_run_task( return_value=devhub_config, ): task() - + zf.close() assert task.return_values["dependencies"] == [ {"version_id": 
"04t000000000009AAA"} ] diff --git a/cumulusci/tasks/tests/test_dx_convert_from.py b/cumulusci/tasks/tests/test_dx_convert_from.py index a966fc7c1b..5c7b3f35bb 100644 --- a/cumulusci/tasks/tests/test_dx_convert_from.py +++ b/cumulusci/tasks/tests/test_dx_convert_from.py @@ -51,7 +51,7 @@ def test_dx_convert_from(sarge, sarge_process, dx_convert_task): assert not src_dir.exists() sarge.Command.assert_called_once_with( - "sfdx force:source:convert -d src", + "sf project convert source -d src", cwd=".", env=ANY, shell=True, diff --git a/cumulusci/tasks/tests/test_sfdx.py b/cumulusci/tasks/tests/test_sfdx.py index b2e443e45b..a2b5a46a81 100644 --- a/cumulusci/tasks/tests/test_sfdx.py +++ b/cumulusci/tasks/tests/test_sfdx.py @@ -37,7 +37,7 @@ def setup_method(self): def test_base_task(self): """The command is prefixed w/ sfdx""" - self.task_config.config["options"] = {"command": "force:org", "extra": "--help"} + self.task_config.config["options"] = {"command": "org", "extra": "--help"} task = SFDXBaseTask(self.project_config, self.task_config) try: @@ -45,14 +45,14 @@ def test_base_task(self): except CommandException: pass - assert task.options["command"] == "force:org" - assert task._get_command() == "sfdx force:org --help" + assert task.options["command"] == "org" + assert task._get_command() == "sf org --help" @patch("cumulusci.tasks.command.Command._run_task", MagicMock(return_value=None)) def test_keychain_org_creds(self): """Keychain org creds are passed by env var""" - self.task_config.config["options"] = {"command": "force:org --help"} + self.task_config.config["options"] = {"command": "org --help"} access_token = "00d123" org_config = OrgConfig( { @@ -71,24 +71,26 @@ def refresh_oauth_token(keychain): task() org_config.refresh_oauth_token.assert_called_once() - assert "SFDX_INSTANCE_URL" in task._get_env() - assert "SFDX_DEFAULTUSERNAME" in task._get_env() - assert access_token in task._get_env()["SFDX_DEFAULTUSERNAME"] + print(task._get_env()) + assert "SF_ORG_INSTANCE_URL" in task._get_env() + assert "SF_TARGET_ORG" in task._get_env() + assert access_token in task._get_env()["SF_TARGET_ORG"] def test_scratch_org_username(self): """Scratch Org credentials are passed by -u flag""" - self.task_config.config["options"] = {"command": "force:org --help"} + self.task_config.config["options"] = {"command": "org --help"} org_config = ScratchOrgConfig({"username": "test@example.com"}, "test") task = SFDXOrgTask(self.project_config, self.task_config, org_config) - assert "-u test@example.com" in task._get_command() + assert "-o test@example.com" in task._get_command() class TestSFDXJsonTask: def test_get_command(self): task = create_task(SFDXJsonTask) command = task._get_command() - assert command == "sfdx force:mdapi:deploy --json" + print(command) + assert command == "sf project deploy start --json" def test_process_output(self): task = create_task(SFDXJsonTask) diff --git a/cumulusci/tasks/vlocity/tests/test_vlocity.py b/cumulusci/tasks/vlocity/tests/test_vlocity.py index 7fa45536ad..03c0fbe3b4 100644 --- a/cumulusci/tasks/vlocity/tests/test_vlocity.py +++ b/cumulusci/tasks/vlocity/tests/test_vlocity.py @@ -247,7 +247,7 @@ def test_deploy_omni_studio_site_settings( # The frequent error is: # # "name": "NoOrgFound", -# "action": "Run the \"sfdx force:auth\" commands with --setdefaultusername to connect to an org and set it as your default org.\nRun \"force:org:create\" with --setdefaultusername to create a scratch org and set it as your default org.\nRun \"sfdx force:config:set 
defaultusername=\" to set your default username." +# "action": "Run the \"sf auth\" commands with --target-org to connect to an org and set it as your default org.\nRun \"org create scratch\" with --target-org to create a scratch org and set it as your default org.\nRun \"sf config set target-org=\" to set your default username." # } diff --git a/cumulusci/tasks/vlocity/vlocity.py b/cumulusci/tasks/vlocity/vlocity.py index 2071a61c83..c1d0f68085 100644 --- a/cumulusci/tasks/vlocity/vlocity.py +++ b/cumulusci/tasks/vlocity/vlocity.py @@ -105,8 +105,8 @@ def _add_token_to_sfdx(self, access_token: str, instance_url: str) -> str: """ # TODO: Use the sf v2 form of this command instead (when we migrate) token_store_cmd = [ - "sfdx", - "force:auth:accesstoken:store", + "sf", + "org login access-token", "--no-prompt", "--alias", f"{VBT_SF_ALIAS}", diff --git a/cumulusci/utils/tests/test_fileutils.py b/cumulusci/utils/tests/test_fileutils.py index 7b09afd2dd..65b0899950 100644 --- a/cumulusci/utils/tests/test_fileutils.py +++ b/cumulusci/utils/tests/test_fileutils.py @@ -199,7 +199,7 @@ def test_resource_test_resource_doesnt_exist_pathlib_relpath(self): class TestFSResourceTempdir(_TestFSResourceShared): - def setup(self): + def setup_method(self): self.tempdir = TemporaryDirectory() self.file = Path(self.tempdir.name) / "testfile.txt" self.file.touch() diff --git a/cumulusci/utils/xml/metadata_tree.py b/cumulusci/utils/xml/metadata_tree.py index 60f6662d79..70b979a4c6 100644 --- a/cumulusci/utils/xml/metadata_tree.py +++ b/cumulusci/utils/xml/metadata_tree.py @@ -49,6 +49,20 @@ def fromstring(source): return MetadataElement(lxml_parse_string(source).getroot()) +def parse_package_xml_types(field_name, source_xml_tree): + """Map each metadata type in package.xml to its list of members, keyed by the given field name.""" + xml_map = {} + for md_type in source_xml_tree.types: + members = [] + try: + for member in md_type.members: + members.append(member.text) + except AttributeError: # A type may have no members + pass + xml_map[md_type[field_name].text] = members + return xml_map + + class MetadataElement: '''A class for representing Metadata in a Pythonic tree.
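A quick sketch of how the new helper is used (mirroring the unit test added below; the manifest path here is illustrative):

    from cumulusci.utils.xml import metadata_tree

    # Parse a package.xml manifest and map each metadata type to its members.
    tree = metadata_tree.parse("unpackaged/package.xml")
    components = metadata_tree.parse_package_xml_types("name", tree)
    # e.g. {"Report": ["namespace__TestFolder/TestReport"]}
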
diff --git a/cumulusci/utils/xml/robot_xml.py b/cumulusci/utils/xml/robot_xml.py index 06a8c20423..09658b504d 100644 --- a/cumulusci/utils/xml/robot_xml.py +++ b/cumulusci/utils/xml/robot_xml.py @@ -1,7 +1,7 @@ import re from typing import Callable, Dict, NamedTuple -from robot.api import ExecutionResult, ResultVisitor +from robot.api import ExecutionResult, ResultVisitor # type: ignore from robot.result.model import TestCase UNITS = { @@ -53,7 +53,8 @@ def log_perf_summary_from_xml( Supply a formatter that takes a PerfSummary triple if the default isn't a good fit: - f(test_name: str, metrics: Dict[str, float], test: robot.result.model.TestCase)""" + f(test_name: str, metrics: Dict[str, float], test: robot.result.model.TestCase) + """ result = ExecutionResult(robot_xml) pl = _perf_logger(logger_func, formatter_func) next(pl) # start the generator diff --git a/cumulusci/utils/xml/test/test_metadata_tree.py b/cumulusci/utils/xml/test/test_metadata_tree.py index 4d0943fb52..2ffb3e573a 100644 --- a/cumulusci/utils/xml/test/test_metadata_tree.py +++ b/cumulusci/utils/xml/test/test_metadata_tree.py @@ -3,7 +3,12 @@ import pytest -from cumulusci.utils.xml.metadata_tree import METADATA_NAMESPACE, fromstring, parse +from cumulusci.utils.xml.metadata_tree import ( + METADATA_NAMESPACE, + fromstring, + parse, + parse_package_xml_types, +) standard_xml = f""" Foo @@ -230,3 +235,17 @@ def test_namespaced_to_string__output_namespaces(self): assert ( " ".join(xml_out.split()).strip() == " ".join(expected_out.split()).strip() ), xml_out.strip() + + def test_parse_package_xml_types(self): + from cumulusci.tasks.metadata import tests + + path = ( + Path(tests.__file__).parent + / "package_metadata/namespaced_report_folder/package.xml" + ) + tree = parse(path) + result = parse_package_xml_types("name", tree) + + expected = {"Report": ["namespace__TestFolder/TestReport"]} + + assert result == expected diff --git a/cumulusci/utils/yaml/cumulusci_yml.py b/cumulusci/utils/yaml/cumulusci_yml.py index f8498ed9ea..93516ed654 100644 --- a/cumulusci/utils/yaml/cumulusci_yml.py +++ b/cumulusci/utils/yaml/cumulusci_yml.py @@ -278,9 +278,5 @@ class ErrorDict(TypedDict): type: str -has_shown_yaml_error_message = False - - def _log_yaml_errors(logger, errors: List[ErrorDict]): "Format and log a Pydantic-style error dictionary" - global has_shown_yaml_error_message @@ -289,18 +285,6 @@ def _log_yaml_errors(logger, errors: List[ErrorDict]): for error in errors: loc = " -> ".join(repr(x) for x in error["loc"] if x != "__root__") logger.warning(" %s\n %s", loc, error["msg"]) - if not has_shown_yaml_error_message: - logger.error( - "NOTE: These warnings will become errors on Sept 30, 2022.\n\n" - "If you need to put non-standard data in your CumulusCI file " - "(for some form of project-specific setting), put it in " - "the `project: custom:` section of `cumulusci.yml` ." - ) - logger.error( - "If you think your YAML has no error, please report the bug to the CumulusCI team."
- ) - logger.error("https://github.com/SFDO-Tooling/CumulusCI/issues/\n") - has_shown_yaml_error_message = True def cci_safe_load( diff --git a/cumulusci/utils/yaml/model_parser.py b/cumulusci/utils/yaml/model_parser.py index b165b8f409..249d914fbd 100644 --- a/cumulusci/utils/yaml/model_parser.py +++ b/cumulusci/utils/yaml/model_parser.py @@ -1,5 +1,5 @@ -from pathlib import Path, Sequence -from typing import IO, Union +from pathlib import Path +from typing import IO, Sequence, Union from pydantic import BaseModel, ValidationError from pydantic.error_wrappers import ErrorWrapper diff --git a/cumulusci/utils/ziputils.py b/cumulusci/utils/ziputils.py index dabb365eb0..f1ae0ead3e 100644 --- a/cumulusci/utils/ziputils.py +++ b/cumulusci/utils/ziputils.py @@ -3,7 +3,7 @@ import zipfile -def zip_subfolder(zip_src, path): +def zip_subfolder(zip_src: zipfile.ZipFile, path): if not path.endswith("/"): path = path + "/" diff --git a/datasets/default/default.mapping.yml b/datasets/default/default.mapping.yml new file mode 100644 index 0000000000..845816ab1f --- /dev/null +++ b/datasets/default/default.mapping.yml @@ -0,0 +1,20 @@ +Account: + sf_object: Account + api: bulk + batch_size: 1 + fields: + - Name + - Description + - ShippingStreet + - ShippingCity + - ShippingState + - ShippingPostalCode + - ShippingCountry + - Phone + - AccountNumber +Contact: + sf_object: Contact + api: bulk + batch_size: 1 + fields: + - FirstName diff --git a/docs/conf.py b/docs/conf.py index 57be1f00a8..62c1e4e8fc 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -245,7 +245,9 @@ "index", "cumulusci.tex", "CumulusCI Documentation", - "Cumulus Suite Development Team", + """Kamalnath Devarakonda\\\\ +Sr Director Software Engineering\\\\ +kdevarakonda@salesforce.com""", "manual", ) ] diff --git a/docs/cookbook.md b/docs/cookbook.md index 3b175a169e..3ed54f35e1 100644 --- a/docs/cookbook.md +++ b/docs/cookbook.md @@ -43,7 +43,7 @@ run_custom_command: The `dx` task lets you run an arbitrary `sfdx` command. You can perform this with `cci` on a terminal: - $ cci task run dx -o command 'force:api:limits:display' + $ cci task run dx -o command 'limits api display' Or you can utilize the same `class_path` as the `dx` task and make a custom task that can be executed by itself or as a step in a flow. @@ -54,7 +54,7 @@ dx_limits: class_path: cumulusci.tasks.sfdx.SFDXBaseTask group: dx options: - command: sfdx force:limits:api:display + command: sf limits api display ``` In this case, we actually utilize `SFDXBaseTask`, if you would like to instead. ### Custom Deploy It is often useful to be able to define multiple custom deployment tasks -that deployg a specific subset of your projects metadata. This is +that deploy a specific subset of your project's metadata. This is particularly true when working with [unpackaged Metadata](unpackaged). Here is a custom task that is defined to deploy only the metadata diff --git a/docs/data.md b/docs/data.md index 063e3f33f5..9badb404e8 100644 --- a/docs/data.md +++ b/docs/data.md @@ -250,6 +250,131 @@ Insert Accounts: Whenever `update_key` is supplied, the action must be `upsert` and vice versa. +--- + +### Selects + +The `select` functionality is designed to streamline the mapping process by enabling the selection of specific records directly from Salesforce for lookups. This feature is particularly useful when dealing with non-insertable Salesforce objects and ensures that pre-existing records are used rather than inserting new ones.
The selection process is highly customizable with various strategies, filters, and additional capabilities that provide flexibility and precision in data mapping. + +```yaml +Account: + sf_object: Account + fields: + - Name + - Description + +Contact: + sf_object: Contact + fields: + - LastName + - Email + lookups: + AccountId: + table: Account + +Lead: + sf_object: Lead + fields: + - LastName + - Company + +Event: + sf_object: Event + action: select + select_options: + strategy: similarity + filter: WHERE Subject LIKE 'Meeting%' + priority_fields: + - Subject + - WhoId + threshold: 0.3 + fields: + - Subject + - DurationInMinutes + - ActivityDateTime + lookups: + WhoId: + table: + - Contact + - Lead + WhatId: + table: Account +``` + +--- + +#### Selection Strategies + +The `strategy` parameter determines how records are selected from the target org. It is **optional**; if no strategy is specified, the `standard` strategy will be applied by default. + +- **`standard` Strategy:** + The `standard` selection strategy retrieves records from the target org in the same order as they appear, applying any specified filters and sorting criteria. This method ensures that records are selected without any prioritization based on similarity or randomness, offering a straightforward way to pull the desired data. + +- **`similarity` Strategy:** + The `similarity` strategy is used when you need to find records in the target org that closely resemble the records defined in your SQL file. This strategy performs a similarity match between the records in the SQL file and those in the target org. In addition to comparing the fields of the record itself, this strategy includes the fields of parent records (up to one level) for a more granular and accurate match. + +- **`random` Strategy:** + The `random` selection strategy assigns randomly picked records from the target org. This method is useful when the selection order does not matter and you simply need to fetch records in a randomized manner. + +--- + +#### Selection Filters + +The selection `filter` provides a flexible way to refine the records selected by using any functionality supported by SOQL. This includes filtering, sorting, and limiting records based on specific conditions, such as using the `WHERE` clause to filter records by field values, the `ORDER BY` clause to sort records in ascending or descending order, and the `LIMIT` clause to restrict the number of records returned. Essentially, any feature available in SOQL for record selection is supported here, allowing you to tailor the selection process to your precise needs and ensuring only the relevant records are included in the mapping process. + +This parameter is **optional**; if not specified, no filter is applied. + +--- + +#### Priority Fields + +The `priority_fields` feature enables you to specify a subset of fields in your mapping step that will have more weight during the similarity matching process. When similarity matching is performed, these priority fields will be given greater importance compared to other fields, allowing for a more refined match. + +This parameter is **optional**; if not specified, all fields are weighted equally. + +This feature is particularly useful when certain fields are more critical in defining the identity or relevance of a record, ensuring that these fields have a stronger influence in the selection process, as in the sketch below.
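+ +For example, a mapping step might weight the name and email fields most heavily while still filtering the candidate pool (the object and field names here are illustrative): + +```yaml +Contact: + sf_object: Contact + action: select + select_options: + strategy: similarity + filter: WHERE Email != null + priority_fields: + - LastName + - Email + fields: + - LastName + - Email +``` + +---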
+ +#### Threshold + +This feature allows you to either select or insert records based on a similarity threshold. When using the `select` action with the `similarity` strategy, you can specify a `threshold` value between `0` and `1`, where `0` represents a perfect match and `1` signifies no similarity. + +- **Select Records:** + If a record from your SQL file has a similarity score below the threshold, it will be selected from the target org. + +- **Insert Records:** + If the similarity score exceeds the threshold, the record will be inserted into the target org instead of being selected. + +This parameter is **optional**; if not specified, no threshold is applied and all records are selected by default. + +This feature is particularly useful during version upgrades, where records that closely match can be selected, while those that do not match sufficiently can be inserted into the target org. + +--- + +#### Example + +To demonstrate the `select` functionality, consider the example of the `Event` entity, which utilizes the `similarity` strategy, a filter condition, and other advanced options to select matching records, as shown in the YAML above. + +1. **Basic Object Configuration**: + + - The `Account`, `Contact`, and `Lead` objects are configured for straightforward field mapping. + - A `lookup` is defined on the `Contact` object to map `AccountId` to the `Account` table. + +2. **Advanced `Event` Object Mapping**: + - **Action**: The `Event` object uses the `select` action, meaning records are selected rather than inserted. + - **Strategy**: The `similarity` strategy matches `Event` records in the target org that are similar to those defined in the SQL file. + - **Filter**: Only `Event` records with a `Subject` field starting with "Meeting" are considered. + - **Priority Fields**: The `Subject` and `WhoId` fields are given more weight during similarity matching. + - **Threshold**: A similarity score of 0.3 is used to determine whether records are selected or inserted. + - **Lookups**: + - The `WhoId` field looks up records from either the `Contact` or `Lead` objects. + - The `WhatId` field looks up records from the `Account` object. + +This example highlights how the `select` functionality can be applied in real-world scenarios, such as selecting `Event` records that meet specific criteria while considering similarity, filters, and priority fields. + +--- + ### Database Mapping CumulusCI's definition format includes considerable flexibility for use diff --git a/docs/env-var-reference.md b/docs/env-var-reference.md index b9f5628e5c..cc01c8a798 100644 --- a/docs/env-var-reference.md +++ b/docs/env-var-reference.md @@ -69,5 +69,4 @@ org, e.g. a Dev Hub. Set with SFDX_CLIENT_ID. ## `SFDX_ORG_CREATE_ARGS` -Extra arguments passed to `sfdx force:org:create`. Can be used to pass -key-value pairs. +Extra arguments passed to `sf org create scratch`. diff --git a/docs/get-started.md b/docs/get-started.md index 545f22d57a..d3b702d581 100644 --- a/docs/get-started.md +++ b/docs/get-started.md @@ -180,12 +180,12 @@ To set up Salesforce DX: Org](https://developer.salesforce.com/docs/atlas.en-us.228.0.sfdx_dev.meta/sfdx_dev/sfdx_setup_enable_devhub.htm) 3. [Connect SFDX to Your Dev Hub Org](https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_auth_web_flow.htm) - - Be sure to use the `--setdefaultdevhubusername` option! + Be sure to use the `--set-default-dev-hub` option!
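+
+   For example (a minimal sketch; the org alias is illustrative):
+
+   ```console
+   $ sf org login web --set-default-dev-hub --alias my-dev-hub
+   ```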
-If you have the `sfdx` command installed, are connected to your Dev Hub, -and set the `defaultdevhubusername` config setting (use -`sfdx force:config:list` to verify), you're now ready to use `cci` with -`sfdx` to build scratch orgs. +If you have the `sf` command installed, are connected to your Dev Hub, +and set the `target-dev-hub` config setting (use +`sf config list` to verify), you're now ready to use `cci` with +`sf` to build scratch orgs. ```{important} SFDX supports multiple Dev Hubs, so CumulusCI uses the one set as @@ -464,7 +464,7 @@ package namespace matches the namespace you entered when running command to extract your package metadata. ```console -$ sfdx force:source:retrieve -n package_name /path/to/project/ +$ sf project retrieve start -n package_name /path/to/project/ ``` That's it! You now have all of the metadata you care about in a single diff --git a/docs/github-actions.md b/docs/github-actions.md index f6950f6e70..08945a268d 100644 --- a/docs/github-actions.md +++ b/docs/github-actions.md @@ -181,15 +181,15 @@ The Cumulus Suite Actions **require CumulusCI 3.61.1 or greater** for any operat All Actions that interact with persistent orgs (such as a packaging org or Dev Hub) authorize those orgs using SFDX Auth URLs. These URLs are obtained by first authorizing an org to the CLI: -`sfdx auth:web:login -a packaging` +`sf org login web -a packaging` and then retrieving the auth URL from the JSON output of the command -`sfdx force:org:display --json --verbose` +`sf org display --json --verbose` under the key `sfdxAuthUrl` under `result`. -If you have `jq` installed, you can do `sfdx force:org:display -u packaging-gh --json --verbose | jq -r .result.sfdxAuthUrl`. +If you have `jq` installed, you can do `sf org display -o packaging-gh --json --verbose | jq -r .result.sfdxAuthUrl`. First-generation package projects will have two auth-URL secrets, for the packaging org and for the Dev Hub. Second-generation and Unlocked package projects will have at least one auth-URL secret, for the Dev Hub, and may have diff --git a/docs/headless.md b/docs/headless.md index ae43464008..78fb913152 100644 --- a/docs/headless.md +++ b/docs/headless.md @@ -121,7 +121,7 @@ and then use it directly from CumulusCI. To do so, follow these steps. 1. Retrieve your auth URL. -1. Authorize the org using `sfdx auth:sfdxurl:store`. +1. Authorize the org using `sf org login sfdx-url`. 1. Run `cci org import `. ### JWT Flow Authorization diff --git a/docs/history.md b/docs/history.md index ad0b0afef7..5ff75547d1 100644 --- a/docs/history.md +++ b/docs/history.md @@ -2,6 +2,180 @@ +## v4.0.1 (2024-11-18) + +### Issues Fixed 🩴 + +- Fixed a ModuleNotFoundError for docutils by adding the dependency `docutils` by [@jstvz](https://github.com/jstvz) in [#3558](https://github.com/SFDO-Tooling/CumulusCI/pull/3558). + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.0...v4.0.1 + + + +## v4.0.0 (2024-11-12) + +## What's Changed + +### Critical Changes 🛠 + +- Python versions 3.8, 3.9, and 3.10 are no longer supported.
+- Switch to `sf` CLI commands [@lakshmi2506](https://github.com/lakshmi2506) in [#3829](https://github.com/SFDO-Tooling/CumulusCI/pull/3829) + - Removed the `dx_pull` and `dx_push` tasks as the underlying commands were removed from `sf` + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.92.0...v4.0.0 + +## v3.93.0 (2024-10-17) + + + +## What's Changed + +### Changes 🎉 + +- fix: escape stage name when running ensure_record_type task by [@leo-dcfa](https://github.com/leo-dcfa) in [#3827](https://github.com/SFDO-Tooling/CumulusCI/pull/3827) +- fix: Upgrade upload-artifact action by [@jstvz](https://github.com/jstvz) in [#3831](https://github.com/SFDO-Tooling/CumulusCI/pull/3831) +- Fix `TypeError` when permissionSetGroup has a mutingPermissionSet by [@lakshmi2506](https://github.com/lakshmi2506) in [#3834](https://github.com/SFDO-Tooling/CumulusCI/pull/3834) +- Add `check_components` preflight to detect metadata component conflicts (#3837) by [@vsbharath](https://github.com/vsbharath) in [#3837](https://github.com/SFDO-Tooling/CumulusCI/pull/3837) + +## New Contributors + +- @leo-dcfa made their first contribution in [#3827](https://github.com/SFDO-Tooling/CumulusCI/pull/3827) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.92.0...v3.93.0 + +## v3.92.0 (2024-09-18) + + + +## What's Changed + +### Changes 🎉 + +- fix: Update input handlers with new Salesforce lightning-primitive tags by [@chanyahn-st](https://github.com/chanyahn-st) in [#3822](https://github.com/SFDO-Tooling/CumulusCI/pull/3822) + +## New Contributors + +- @chanyahn-st made their first contribution in [#3822](https://github.com/SFDO-Tooling/CumulusCI/pull/3822) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.91.0...v3.92.0 + +## v3.91.0 (2024-08-22) + + + +## What's Changed + +### Changes 🎉 + +- Fix to Include All Keys in standardValue XML for add_standard_value_set_entries Task from Entries Option with Backward Compatibility by [@lakshmi2506](https://github.com/lakshmi2506) in [#3820](https://github.com/SFDO-Tooling/CumulusCI/pull/3820) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.90.1...v3.91.0 + +## v3.90.1 (2024-08-20) + + + +## What's Changed + +### Changes 🎉 + +- Fix to Suppress the required field check for the upsert action at mapping step by [@lakshmi2506](https://github.com/lakshmi2506) in [#3817](https://github.com/SFDO-Tooling/CumulusCI/pull/3817) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.90.0...v3.90.1 + +## v3.90.0 (2024-07-15) + + + +## What's Changed + +### Changes 🎉 + +- Add tasks to extract and upload files (Salesforce Files) by [@mjawadtp](https://github.com/mjawadtp) in [#3801](https://github.com/SFDO-Tooling/CumulusCI/pull/3801) +- Update the validation for loading to check the required fields as well by [@lakshmi2506](https://github.com/lakshmi2506) in [#3807](https://github.com/SFDO-Tooling/CumulusCI/pull/3807) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.89.0...v3.90.0 + +## v3.89.0 (2024-06-20) + + + +## What's Changed + +### Changes 🎉 + +- Added the `list_files` task to display files in an org by [@lakshmi2506](https://github.com/lakshmi2506) in [#3799](https://github.com/SFDO-Tooling/CumulusCI/pull/3799) +- The `api_names` option of the `assign_permission_set_licenses` task now accepts either `DeveloperName` or `PermissionSetLicenseKey` by [@lakshmi2506](https://github.com/lakshmi2506) in 
[#3798](https://github.com/SFDO-Tooling/CumulusCI/pull/3798) + +## v3.88.0 (2024-05-24) + + + +## What's Changed + +### Changes 🎉 + +- Extend modal close wait with custom timeout by [@leboff](https://github.com/leboff) in [#3783](https://github.com/SFDO-Tooling/CumulusCI/pull/3783) +- Added check_output true to get the logs of metadata retrieval by [@lakshmi2506](https://github.com/lakshmi2506) in [#3789](https://github.com/SFDO-Tooling/CumulusCI/pull/3789) + +### Issues Fixed 🩴 + +- Fix retrieve unpackaged so it is usable in metadeploy by [@yippie](https://github.com/yippie) in [#3566](https://github.com/SFDO-Tooling/CumulusCI/pull/3566) + +## New Contributors + +- @yippie made their first contribution in [#3566](https://github.com/SFDO-Tooling/CumulusCI/pull/3566) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.87.0...v3.88.0 + +## v3.87.0 (2024-05-17) + + + +## What's Changed + +### Changes 🎉 + +- Don't add fullname tag in 2GP package.xml by [@leboff](https://github.com/leboff) in [#3748](https://github.com/SFDO-Tooling/CumulusCI/pull/3748) + +## New Contributors + +- @leboff made their first contribution in [#3748](https://github.com/SFDO-Tooling/CumulusCI/pull/3748) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.86.1...v3.87.0 + +## v3.86.1 (2024-05-06) + + + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.86.0...v3.86.1 + +## v3.86.0 (2024-03-27) + + + +## What's Changed + +### Changes 🎉 + +- @W-15258380 - Reloading of login URL's from env before creating jwt session by [@lakshmi2506](https://github.com/lakshmi2506) in [#3765](https://github.com/SFDO-Tooling/CumulusCI/pull/3765) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.85.0...v3.86.0 + +## v3.85.0 (2024-03-13) + + + +## What's Changed + +### Changes 🎉 + +- Fix Invalid Lookups of namespaced fields by [@aditya-balachander](https://github.com/aditya-balachander) in [#3759](https://github.com/SFDO-Tooling/CumulusCI/pull/3759) +- Fix Id Not Specified on Update Call by [@aditya-balachander](https://github.com/aditya-balachander) in [#3761](https://github.com/SFDO-Tooling/CumulusCI/pull/3761) +- Fix dependency parsing errors caused by `collision_check` option by [@jstvz](https://github.com/jstvz) in [#3760](https://github.com/SFDO-Tooling/CumulusCI/pull/3760) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.84.3...v3.85.0 + ## v3.84.3 (2024-02-24) @@ -14,8 +188,6 @@ **Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.84.2...v3.84.3 - - ## v3.84.2 (2024-02-20) diff --git a/docs/managed-2gp.md b/docs/managed-2gp.md index 840de68506..83b77f70df 100644 --- a/docs/managed-2gp.md +++ b/docs/managed-2gp.md @@ -170,7 +170,7 @@ the GitHub release operations: $ cci task run promote_package_version --version_id 04t000000000000 --promote_dependencies True ``` -Alternatively, you can use the `sfdx force:package:version:promote` +Alternatively, you can use the `sf package version promote` command to promote a 2GP package. Note that using this command will also not perform any release operations in GitHub. diff --git a/docs/scratch-orgs.md b/docs/scratch-orgs.md index 93e46f1203..83b5c90fa5 100644 --- a/docs/scratch-orgs.md +++ b/docs/scratch-orgs.md @@ -115,7 +115,7 @@ Scratch org limits are based on your Dev Hub's edition and your Salesforce contract. 
To review limits and consumption, run the command:
 
 ```console
-$ sfdx force:limits:api:display -u <username>
+$ sf org list limits --target-org <username>
 ```
 
 `<username>` is your Dev Hub username. The limit names are
diff --git a/docs/unlocked-package.md b/docs/unlocked-package.md
index b4e770131f..553478e087 100644
--- a/docs/unlocked-package.md
+++ b/docs/unlocked-package.md
@@ -168,7 +168,7 @@ the GitHub release operations:
 $ cci task run promote_package_version --version_id 04t000000000000 --promote_dependencies True
 ```
 
-Alternatively, you can use the `sfdx force:package:version:promote`
+Alternatively, you can use the `sf package version promote`
 command to promote a 2GP package.
 
 ### Promote Dependencies
diff --git a/pyproject.toml b/pyproject.toml
index 1d94df527f..7dec9eedab 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ name = "cumulusci"
 dynamic = ["readme", "version"]
 description = "Build and release tools for Salesforce developers"
 license = { text = "BSD 3-Clause License" }
-requires-python = ">=3.8"
+requires-python = ">=3.11"
 authors = [
     { name = "Salesforce.org", email = "sfdo-mrbelvedere@salesforce.com" },
 ]
@@ -18,17 +18,15 @@ classifiers = [
     "License :: OSI Approved :: BSD License",
     "Natural Language :: English",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
-    "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
 ]
 dependencies = [
-    "click",
+    "annoy",
+    "click>=8.1",
     "cryptography",
     "python-dateutil",
-    "docutils<0.17",
     "Faker",
     "fs",
     "github3.py",
@@ -36,8 +34,9 @@ dependencies = [
     "keyring<=23.0.1",
     "defusedxml",
     "lxml",
-    "markdown-it-py==2.2.0", # resolve dependency conflict between prod/dev
     "MarkupSafe",
+    "numpy",
+    "pandas",
     "psutil",
     "pydantic<2",
     "PyJWT",
@@ -45,38 +44,48 @@ dependencies = [
     "pyyaml",
     "requests",
     "requests-futures",
-    "rich",
+    "rich>=13.9.4",
     "robotframework",
+    "SQLAlchemy<2",
     "robotframework-pabot",
     "robotframework-requests",
     "robotframework-seleniumlibrary<6",
-    "rst2ansi",
+    "rst2ansi>=0.1.5",
     "salesforce-bulk",
     "sarge",
+    "scikit-learn",
     "selenium<4",
     "simple-salesforce==1.11.4",
-    "snowfakery",
-    "SQLAlchemy",
+    "snowfakery>=4.0.0",
     "xmltodict",
+    "docutils>=0.21.2",
 ]
 
-[project.optional-dependencies]
-docs = ["myst-parser", "Sphinx"]
-lint = ["black", "flake8<4", "isort", "pre-commit"]
-test = [
-    "coverage[toml]",
-    "factory-boy",
-    "furo",
-    "jsonschema",
-    "pytest<7.1 ", # https://github.com/pytest-dev/pytest/issues/9765
-    "pytest-cov",
-    "pytest-random-order",
-    "pytest-vcr",
-    "responses",
-    "testfixtures",
-    "tox",
-    "typeguard<=2.13.3", # TODO: Lots of changes required for v4
-    "vcrpy"
+[dependency-groups]
+docs = [
+    "myst-parser>=1.0.0",
+    "sphinx>=5.3.0",
+]
+dev = [
+    "coverage[toml]>=7.6.1",
+    "factory-boy>=3.3.1",
+    "furo>=2023.3.27",
+    "jsonschema>=4.23.0",
+    "pytest>=7.0.1",
+    "pytest-cov>=5.0.0",
+    "pytest-random-order>=1.1.1",
+    "pytest-vcr>=1.0.2",
+    "responses>=0.23.1",
+    "testfixtures>=8.3.0",
+    "tox>=4.20.0",
+    "typeguard<=2.13.3", # TODO: Lots of changes required for v4
+    "vcrpy>=6.0.2",
+]
+lint = [
+    "black>=24.8.0",
+    "flake8<4",
+    "isort>=5.13.2",
+    "pre-commit>=3.5.0",
 ]
 
 [project.scripts]
@@ -102,16 +111,13 @@ include = [
 include = [
     "/cumulusci",
     "/requirements/*", # Needed by tox
-    "README.md", # needed by hatch-fancy-pypi-readme
-    "docs/history.md"
+    "README.md",       # needed by hatch-fancy-pypi-readme
+    "docs/history.md", # ditto
] [tool.hatch.build.targets.wheel] -exclude = [ - "*.sql", - "*.zip" -] +exclude = ["*.sql", "*.zip"] [tool.hatch.metadata.hooks.fancy-pypi-readme] content-type = "text/markdown" @@ -254,7 +260,7 @@ include = [ 'cumulusci/tests/util.py', 'cumulusci/utils/waiting.py', 'cumulusci/utils/xml/robot_xml.py', - 'cumulusci/utils/ziputils.py' + 'cumulusci/utils/ziputils.py', ] # Do not add to this list. Instead use # # pyright: strict @@ -290,5 +296,5 @@ strict = [ 'cumulusci/tasks/release_notes/exceptions.py', 'cumulusci/tasks/salesforce/BaseSalesforceTask.py', 'cumulusci/tasks/vlocity/exceptions.py', - 'cumulusci/utils/soql.py' + 'cumulusci/utils/soql.py', ] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 5eaadb8e90..0000000000 --- a/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --r requirements/prod.txt diff --git a/requirements/dev.txt b/requirements/dev.txt deleted file mode 100644 index 59bd22d4cf..0000000000 --- a/requirements/dev.txt +++ /dev/null @@ -1,387 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --all-extras --output-file=requirements/dev.txt pyproject.toml -# -alabaster==0.7.13 - # via sphinx -appdirs==1.4.4 - # via fs -attrs==23.1.0 - # via - # jsonschema - # pytest - # referencing -authlib==1.2.1 - # via simple-salesforce -babel==2.13.1 - # via sphinx -beautifulsoup4==4.12.2 - # via furo -black==23.11.0 - # via cumulusci (pyproject.toml) -cachetools==5.3.2 - # via tox -certifi==2023.7.22 - # via - # requests - # snowfakery -cffi==1.16.0 - # via cryptography -cfgv==3.4.0 - # via pre-commit -chardet==5.2.0 - # via tox -charset-normalizer==3.2.0 - # via - # requests - # snowfakery -click==8.1.6 - # via - # black - # cumulusci (pyproject.toml) - # snowfakery -colorama==0.4.6 - # via tox -coverage[toml]==7.3.2 - # via - # cumulusci (pyproject.toml) - # pytest-cov -cryptography==41.0.7 - # via - # authlib - # cumulusci (pyproject.toml) - # pyjwt - # secretstorage -defusedxml==0.7.1 - # via cumulusci (pyproject.toml) -distlib==0.3.7 - # via virtualenv -docutils==0.16 - # via - # cumulusci (pyproject.toml) - # myst-parser - # sphinx -factory-boy==3.3.0 - # via cumulusci (pyproject.toml) -faker==19.3.0 - # via - # cumulusci (pyproject.toml) - # factory-boy - # faker-edu - # faker-nonprofit - # snowfakery -faker-edu==1.0.0 - # via snowfakery -faker-nonprofit==1.0.0 - # via snowfakery -filelock==3.13.1 - # via - # tox - # virtualenv -flake8==3.9.2 - # via cumulusci (pyproject.toml) -fs==2.4.16 - # via cumulusci (pyproject.toml) -furo==2023.3.27 - # via cumulusci (pyproject.toml) -github3-py==4.0.1 - # via cumulusci (pyproject.toml) -greenlet==3.0.1 - # via sqlalchemy -gvgen==1.0 - # via snowfakery -identify==2.5.32 - # via pre-commit -idna==3.4 - # via - # requests - # snowfakery - # yarl -imagesize==1.4.1 - # via sphinx -importlib-metadata==6.8.0 - # via - # keyring - # sphinx -importlib-resources==6.1.1 - # via - # jsonschema - # jsonschema-specifications -iniconfig==2.0.0 - # via pytest -isort==5.12.0 - # via cumulusci (pyproject.toml) -jeepney==0.8.0 - # via - # keyring - # secretstorage -jinja2==3.1.2 - # via - # cumulusci (pyproject.toml) - # myst-parser - # snowfakery - # sphinx -jsonschema==4.20.0 - # via cumulusci (pyproject.toml) -jsonschema-specifications==2023.11.2 - # via jsonschema -keyring==23.0.1 - # via cumulusci (pyproject.toml) -lxml==4.9.3 - # via cumulusci (pyproject.toml) -markdown-it-py==2.2.0 - # via - # cumulusci (pyproject.toml) - # mdit-py-plugins - # myst-parser 
- # rich -markupsafe==2.1.3 - # via - # cumulusci (pyproject.toml) - # jinja2 - # snowfakery -mccabe==0.6.1 - # via flake8 -mdit-py-plugins==0.3.5 - # via myst-parser -mdurl==0.1.2 - # via markdown-it-py -multidict==6.0.4 - # via yarl -mypy-extensions==1.0.0 - # via black -myst-parser==1.0.0 - # via cumulusci (pyproject.toml) -natsort==8.4.0 - # via robotframework-pabot -nodeenv==1.8.0 - # via pre-commit -packaging==23.2 - # via - # black - # pyproject-api - # pytest - # sphinx - # tox -pathspec==0.11.2 - # via black -pkgutil-resolve-name==1.3.10 - # via jsonschema -platformdirs==4.0.0 - # via - # black - # tox - # virtualenv -pluggy==1.3.0 - # via - # pytest - # tox -pre-commit==3.5.0 - # via cumulusci (pyproject.toml) -psutil==5.9.6 - # via cumulusci (pyproject.toml) -py==1.11.0 - # via pytest -pycodestyle==2.7.0 - # via flake8 -pycparser==2.21 - # via cffi -pydantic==1.10.12 - # via - # cumulusci (pyproject.toml) - # snowfakery -pyflakes==2.3.1 - # via flake8 -pygments==2.17.2 - # via - # furo - # rich - # sphinx -pyjwt[crypto]==2.8.0 - # via - # cumulusci (pyproject.toml) - # github3-py -pyproject-api==1.6.1 - # via tox -pytest==7.0.1 - # via - # cumulusci (pyproject.toml) - # pytest-cov - # pytest-random-order - # pytest-vcr -pytest-cov==4.1.0 - # via cumulusci (pyproject.toml) -pytest-random-order==1.1.0 - # via cumulusci (pyproject.toml) -pytest-vcr==1.0.2 - # via cumulusci (pyproject.toml) -python-baseconv==1.2.2 - # via snowfakery -python-dateutil==2.8.2 - # via - # cumulusci (pyproject.toml) - # faker - # github3-py - # snowfakery -pytz==2023.3.post1 - # via - # babel - # cumulusci (pyproject.toml) -pyyaml==6.0.1 - # via - # cumulusci (pyproject.toml) - # myst-parser - # pre-commit - # responses - # snowfakery - # vcrpy -referencing==0.31.1 - # via - # jsonschema - # jsonschema-specifications -requests==2.29.0 - # via - # cumulusci (pyproject.toml) - # github3-py - # requests-futures - # responses - # robotframework-requests - # salesforce-bulk - # simple-salesforce - # snowfakery - # sphinx -requests-futures==1.0.1 - # via cumulusci (pyproject.toml) -responses==0.23.1 - # via cumulusci (pyproject.toml) -rich==13.7.0 - # via cumulusci (pyproject.toml) -robotframework==6.1.1 - # via - # cumulusci (pyproject.toml) - # robotframework-pabot - # robotframework-requests - # robotframework-seleniumlibrary - # robotframework-stacktrace -robotframework-pabot==2.16.0 - # via cumulusci (pyproject.toml) -robotframework-pythonlibcore==4.3.0 - # via robotframework-seleniumlibrary -robotframework-requests==0.9.6 - # via cumulusci (pyproject.toml) -robotframework-seleniumlibrary==5.1.3 - # via cumulusci (pyproject.toml) -robotframework-stacktrace==0.4.1 - # via robotframework-pabot -rpds-py==0.13.2 - # via - # jsonschema - # referencing -rst2ansi==0.1.5 - # via cumulusci (pyproject.toml) -salesforce-bulk==2.2.0 - # via cumulusci (pyproject.toml) -sarge==0.1.7.post1 - # via cumulusci (pyproject.toml) -secretstorage==3.3.3 - # via keyring -selenium==3.141.0 - # via - # cumulusci (pyproject.toml) - # robotframework-seleniumlibrary -simple-salesforce==1.11.4 - # via - # cumulusci (pyproject.toml) - # salesforce-bulk -six==1.16.0 - # via - # fs - # python-dateutil - # salesforce-bulk - # snowfakery -snowballstemmer==2.2.0 - # via sphinx -snowfakery==3.6.1 - # via cumulusci (pyproject.toml) -soupsieve==2.5 - # via beautifulsoup4 -sphinx==5.3.0 - # via - # cumulusci (pyproject.toml) - # furo - # myst-parser - # sphinx-basic-ng -sphinx-basic-ng==1.0.0b2 - # via furo -sphinxcontrib-applehelp==1.0.4 - # 
via sphinx -sphinxcontrib-devhelp==1.0.2 - # via sphinx -sphinxcontrib-htmlhelp==2.0.1 - # via sphinx -sphinxcontrib-jsmath==1.0.1 - # via sphinx -sphinxcontrib-qthelp==1.0.3 - # via sphinx -sphinxcontrib-serializinghtml==1.1.5 - # via sphinx -sqlalchemy==1.4.49 - # via - # cumulusci (pyproject.toml) - # snowfakery -testfixtures==7.2.2 - # via cumulusci (pyproject.toml) -tomli==2.0.1 - # via - # black - # coverage - # pyproject-api - # pytest - # tox -tox==4.11.4 - # via cumulusci (pyproject.toml) -typeguard==2.13.3 - # via cumulusci (pyproject.toml) -types-pyyaml==6.0.12.12 - # via responses -typing-extensions==4.7.1 - # via - # black - # faker - # pydantic - # rich - # snowfakery -unicodecsv==0.14.1 - # via salesforce-bulk -uritemplate==4.1.1 - # via github3-py -urllib3==1.26.16 - # via - # requests - # responses - # selenium - # snowfakery - # vcrpy -vcrpy==5.1.0 - # via - # cumulusci (pyproject.toml) - # pytest-vcr -virtualenv==20.24.7 - # via - # pre-commit - # tox -wrapt==1.16.0 - # via vcrpy -xmltodict==0.13.0 - # via cumulusci (pyproject.toml) -yarl==1.9.3 - # via vcrpy -zipp==3.17.0 - # via - # importlib-metadata - # importlib-resources - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/requirements/prod.txt b/requirements/prod.txt deleted file mode 100644 index 151f818426..0000000000 --- a/requirements/prod.txt +++ /dev/null @@ -1,191 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --output-file=requirements/prod.txt pyproject.toml -# -appdirs==1.4.4 - # via fs -authlib==1.2.1 - # via simple-salesforce -certifi==2023.7.22 - # via - # requests - # snowfakery -cffi==1.16.0 - # via cryptography -charset-normalizer==3.2.0 - # via - # requests - # snowfakery -click==8.1.6 - # via - # cumulusci (pyproject.toml) - # snowfakery -cryptography==41.0.7 - # via - # authlib - # cumulusci (pyproject.toml) - # pyjwt - # secretstorage -defusedxml==0.7.1 - # via cumulusci (pyproject.toml) -docutils==0.16 - # via cumulusci (pyproject.toml) -faker==19.3.0 - # via - # cumulusci (pyproject.toml) - # faker-edu - # faker-nonprofit - # snowfakery -faker-edu==1.0.0 - # via snowfakery -faker-nonprofit==1.0.0 - # via snowfakery -fs==2.4.16 - # via cumulusci (pyproject.toml) -github3-py==4.0.1 - # via cumulusci (pyproject.toml) -greenlet==3.0.1 - # via sqlalchemy -gvgen==1.0 - # via snowfakery -idna==3.4 - # via - # requests - # snowfakery -importlib-metadata==6.8.0 - # via keyring -jeepney==0.8.0 - # via - # keyring - # secretstorage -jinja2==3.1.2 - # via - # cumulusci (pyproject.toml) - # snowfakery -keyring==23.0.1 - # via cumulusci (pyproject.toml) -lxml==4.9.3 - # via cumulusci (pyproject.toml) -markdown-it-py==2.2.0 - # via - # cumulusci (pyproject.toml) - # rich -markupsafe==2.1.3 - # via - # cumulusci (pyproject.toml) - # jinja2 - # snowfakery -mdurl==0.1.2 - # via markdown-it-py -natsort==8.4.0 - # via robotframework-pabot -psutil==5.9.6 - # via cumulusci (pyproject.toml) -pycparser==2.21 - # via cffi -pydantic==1.10.12 - # via - # cumulusci (pyproject.toml) - # snowfakery -pygments==2.17.2 - # via rich -pyjwt[crypto]==2.8.0 - # via - # cumulusci (pyproject.toml) - # github3-py -python-baseconv==1.2.2 - # via snowfakery -python-dateutil==2.8.2 - # via - # cumulusci (pyproject.toml) - # faker - # github3-py - # snowfakery -pytz==2023.3.post1 - # via cumulusci (pyproject.toml) -pyyaml==6.0.1 - # via - # cumulusci (pyproject.toml) - # snowfakery -requests==2.29.0 - # via - # 
cumulusci (pyproject.toml) - # github3-py - # requests-futures - # robotframework-requests - # salesforce-bulk - # simple-salesforce - # snowfakery -requests-futures==1.0.1 - # via cumulusci (pyproject.toml) -rich==13.7.0 - # via cumulusci (pyproject.toml) -robotframework==6.1.1 - # via - # cumulusci (pyproject.toml) - # robotframework-pabot - # robotframework-requests - # robotframework-seleniumlibrary - # robotframework-stacktrace -robotframework-pabot==2.16.0 - # via cumulusci (pyproject.toml) -robotframework-pythonlibcore==4.3.0 - # via robotframework-seleniumlibrary -robotframework-requests==0.9.6 - # via cumulusci (pyproject.toml) -robotframework-seleniumlibrary==5.1.3 - # via cumulusci (pyproject.toml) -robotframework-stacktrace==0.4.1 - # via robotframework-pabot -rst2ansi==0.1.5 - # via cumulusci (pyproject.toml) -salesforce-bulk==2.2.0 - # via cumulusci (pyproject.toml) -sarge==0.1.7.post1 - # via cumulusci (pyproject.toml) -secretstorage==3.3.3 - # via keyring -selenium==3.141.0 - # via - # cumulusci (pyproject.toml) - # robotframework-seleniumlibrary -simple-salesforce==1.11.4 - # via - # cumulusci (pyproject.toml) - # salesforce-bulk -six==1.16.0 - # via - # fs - # python-dateutil - # salesforce-bulk - # snowfakery -snowfakery==3.6.1 - # via cumulusci (pyproject.toml) -sqlalchemy==1.4.49 - # via - # cumulusci (pyproject.toml) - # snowfakery -typing-extensions==4.7.1 - # via - # faker - # pydantic - # rich - # snowfakery -unicodecsv==0.14.1 - # via salesforce-bulk -uritemplate==4.1.1 - # via github3-py -urllib3==1.26.16 - # via - # requests - # selenium - # snowfakery -xmltodict==0.13.0 - # via cumulusci (pyproject.toml) -zipp==3.17.0 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/requirements_dev.txt b/requirements_dev.txt deleted file mode 100644 index 45745ac762..0000000000 --- a/requirements_dev.txt +++ /dev/null @@ -1,3 +0,0 @@ --r requirements/prod.txt --r requirements/dev.txt --e . 
diff --git a/utility/pin_dependencies.py b/utility/pin_dependencies.py deleted file mode 100644 index eeb6c15c23..0000000000 --- a/utility/pin_dependencies.py +++ /dev/null @@ -1,28 +0,0 @@ -import re -from pathlib import Path - -import tomli -import tomli_w - - -def main(toml_filename: Path, requirements_txt: Path): - with open(toml_filename, "rb") as f: - data = tomli.load(f) - - with open(requirements_txt) as f: - requirements = re.findall(r".*==.*", f.read()) - - pin_dependencies(data, requirements) - - with open(toml_filename, "wb") as f: - tomli_w.dump(data, f) - - -def pin_dependencies(data: dict, requirements: str): - data["project"]["dependencies"] = requirements - - -root = Path(__file__).parent.parent -requirements = root / "requirements" -main(root / "pyproject.toml", requirements / "prod.txt") -print("Updated ", root / "pyproject.toml") diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000000..0d39fb87b6 --- /dev/null +++ b/uv.lock @@ -0,0 +1,2000 @@ +version = 1 +requires-python = ">=3.11" +resolution-markers = [ + "platform_python_implementation == 'PyPy'", + "platform_python_implementation != 'PyPy'", +] + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929 }, +] + +[[package]] +name = "appdirs" +version = "1.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/d8/05696357e0311f5b5c316d7b95f46c669dd9c15aaeecbb48c7d0aeb88c40/appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", size = 13470 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566 }, +] + +[[package]] +name = "attrs" +version = "24.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/0f/aafca9af9315aee06a89ffde799a10a582fe8de76c563ee80bbcdc08b3fb/attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", size = 792678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 }, +] + +[[package]] +name = "authlib" +version = "1.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/75/47dbab150ef6f9298e227a40c93c7fed5f3ffb67c9fb62cd49f66285e46e/authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2", size = 147313 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/df/4c/9aa0416a403d5cc80292cb030bcd2c918cce2755e314d8c1aa18656e1e12/Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc", size = 225111 }, +] + +[[package]] +name = "babel" +version = "2.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/74/f1bc80f23eeba13393b7222b11d95ca3af2c1e28edca18af487137eefed9/babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316", size = 9348104 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/20/bc79bc575ba2e2a7f70e8a1155618bb1301eaa5132a8271373a6903f73f8/babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", size = 9587599 }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/ca/824b1195773ce6166d388573fc106ce56d4a805bd7427b624e063596ec58/beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051", size = 581181 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925 }, +] + +[[package]] +name = "black" +version = "24.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/0d/cc2fb42b8c50d80143221515dd7e4766995bd07c56c9a3ed30baf080b6dc/black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875", size = 645813 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/cc/7496bb63a9b06a954d3d0ac9fe7a73f3bf1cd92d7a58877c27f4ad1e9d41/black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad", size = 1607468 }, + { url = "https://files.pythonhosted.org/packages/2b/e3/69a738fb5ba18b5422f50b4f143544c664d7da40f09c13969b2fd52900e0/black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50", size = 1437270 }, + { url = "https://files.pythonhosted.org/packages/c9/9b/2db8045b45844665c720dcfe292fdaf2e49825810c0103e1191515fc101a/black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392", size = 1737061 }, + { url = "https://files.pythonhosted.org/packages/a3/95/17d4a09a5be5f8c65aa4a361444d95edc45def0de887810f508d3f65db7a/black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175", size = 1423293 }, + { url = "https://files.pythonhosted.org/packages/90/04/bf74c71f592bcd761610bbf67e23e6a3cff824780761f536512437f1e655/black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3", size = 1644256 }, + { url = 
"https://files.pythonhosted.org/packages/4c/ea/a77bab4cf1887f4b2e0bce5516ea0b3ff7d04ba96af21d65024629afedb6/black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65", size = 1448534 }, + { url = "https://files.pythonhosted.org/packages/4e/3e/443ef8bc1fbda78e61f79157f303893f3fddf19ca3c8989b163eb3469a12/black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f", size = 1761892 }, + { url = "https://files.pythonhosted.org/packages/52/93/eac95ff229049a6901bc84fec6908a5124b8a0b7c26ea766b3b8a5debd22/black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8", size = 1434796 }, + { url = "https://files.pythonhosted.org/packages/d0/a0/a993f58d4ecfba035e61fca4e9f64a2ecae838fc9f33ab798c62173ed75c/black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981", size = 1643986 }, + { url = "https://files.pythonhosted.org/packages/37/d5/602d0ef5dfcace3fb4f79c436762f130abd9ee8d950fa2abdbf8bbc555e0/black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b", size = 1448085 }, + { url = "https://files.pythonhosted.org/packages/47/6d/a3a239e938960df1a662b93d6230d4f3e9b4a22982d060fc38c42f45a56b/black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2", size = 1760928 }, + { url = "https://files.pythonhosted.org/packages/dd/cf/af018e13b0eddfb434df4d9cd1b2b7892bab119f7a20123e93f6910982e8/black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b", size = 1436875 }, + { url = "https://files.pythonhosted.org/packages/8d/a7/4b27c50537ebca8bec139b872861f9d2bf501c5ec51fcf897cb924d9e264/black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d", size = 206898 }, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/38/a0f315319737ecf45b4319a8cd1f3a908e29d9277b46942263292115eee7/cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a", size = 27661 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/07/14f8ad37f2d12a5ce41206c21820d8cb6561b728e51fad4530dff0552a67/cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292", size = 9524 }, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "pycparser", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, + { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, + { url = 
"https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, +] + +[[package]] +name = "chardet" +version = "5.2.0" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/61/73589dcc7a719582bf56aae309b6103d2762b526bffe189d635a7fcfd998/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", size = 193339 }, + { url = "https://files.pythonhosted.org/packages/77/d5/8c982d58144de49f59571f940e329ad6e8615e1e82ef84584c5eeb5e1d72/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", size = 124366 }, + { url = "https://files.pythonhosted.org/packages/bf/19/411a64f01ee971bed3231111b69eb56f9331a769072de479eae7de52296d/charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", size = 118874 }, + { url = "https://files.pythonhosted.org/packages/4c/92/97509850f0d00e9f14a46bc751daabd0ad7765cff29cdfb66c68b6dad57f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c", size = 138243 }, + { url = "https://files.pythonhosted.org/packages/e2/29/d227805bff72ed6d6cb1ce08eec707f7cfbd9868044893617eb331f16295/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6", size = 148676 }, + { url = "https://files.pythonhosted.org/packages/13/bc/87c2c9f2c144bedfa62f894c3007cd4530ba4b5351acb10dc786428a50f0/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea", size = 141289 }, + { url = "https://files.pythonhosted.org/packages/eb/5b/6f10bad0f6461fa272bfbbdf5d0023b5fb9bc6217c92bf068fa5a99820f5/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc", size = 142585 }, + { url = "https://files.pythonhosted.org/packages/3b/a0/a68980ab8a1f45a36d9745d35049c1af57d27255eff8c907e3add84cf68f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5", size = 144408 }, + { url = "https://files.pythonhosted.org/packages/d7/a1/493919799446464ed0299c8eef3c3fad0daf1c3cd48bff9263c731b0d9e2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594", size = 139076 }, + { url = "https://files.pythonhosted.org/packages/fb/9d/9c13753a5a6e0db4a0a6edb1cef7aee39859177b64e1a1e748a6e3ba62c2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c", size = 146874 }, + { url = "https://files.pythonhosted.org/packages/75/d2/0ab54463d3410709c09266dfb416d032a08f97fd7d60e94b8c6ef54ae14b/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365", size = 150871 }, + { url = "https://files.pythonhosted.org/packages/8d/c9/27e41d481557be53d51e60750b85aa40eaf52b841946b3cdeff363105737/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129", size = 148546 }, + { url = "https://files.pythonhosted.org/packages/ee/44/4f62042ca8cdc0cabf87c0fc00ae27cd8b53ab68be3605ba6d071f742ad3/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236", size = 143048 }, + { url = "https://files.pythonhosted.org/packages/01/f8/38842422988b795220eb8038745d27a675ce066e2ada79516c118f291f07/charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99", size = 94389 }, + { url = "https://files.pythonhosted.org/packages/0b/6e/b13bd47fa9023b3699e94abf565b5a2f0b0be6e9ddac9812182596ee62e4/charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27", size = 101752 }, + { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 }, + { url = "https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 }, + { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 }, + { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 }, + { url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 }, + { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 }, + { url = 
"https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 }, + { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 }, + { url = "https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 }, + { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 }, + { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 }, + { url = "https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 }, + { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 }, + { url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 }, + { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 }, + { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, + { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, + { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, + { url = 
"https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, + { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, + { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, + { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, + { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, + { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, + { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, + { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, + { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, + { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, + { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, + { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, + { url = 
"https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, +] + +[[package]] +name = "click" +version = "8.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "coverage" +version = "7.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/12/3669b6382792783e92046730ad3327f53b2726f0603f4c311c4da4824222/coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73", size = 798716 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/31/9c0cf84f0dfcbe4215b7eb95c31777cdc0483c13390e69584c8150c85175/coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b", size = 206819 }, + { url = "https://files.pythonhosted.org/packages/53/ed/a38401079ad320ad6e054a01ec2b61d270511aeb3c201c80e99c841229d5/coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25", size = 207263 }, + { url = "https://files.pythonhosted.org/packages/20/e7/c3ad33b179ab4213f0d70da25a9c214d52464efa11caeab438592eb1d837/coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546", size = 239205 }, + { url = "https://files.pythonhosted.org/packages/36/91/fc02e8d8e694f557752120487fd982f654ba1421bbaa5560debf96ddceda/coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b", size = 236612 }, + { url = "https://files.pythonhosted.org/packages/cc/57/cb08f0eda0389a9a8aaa4fc1f9fec7ac361c3e2d68efd5890d7042c18aa3/coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e", size = 238479 }, + { url = 
"https://files.pythonhosted.org/packages/d5/c9/2c7681a9b3ca6e6f43d489c2e6653a53278ed857fd6e7010490c307b0a47/coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718", size = 237405 }, + { url = "https://files.pythonhosted.org/packages/b5/4e/ebfc6944b96317df8b537ae875d2e57c27b84eb98820bc0a1055f358f056/coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db", size = 236038 }, + { url = "https://files.pythonhosted.org/packages/13/f2/3a0bf1841a97c0654905e2ef531170f02c89fad2555879db8fe41a097871/coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522", size = 236812 }, + { url = "https://files.pythonhosted.org/packages/b9/9c/66bf59226b52ce6ed9541b02d33e80a6e816a832558fbdc1111a7bd3abd4/coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf", size = 209400 }, + { url = "https://files.pythonhosted.org/packages/2a/a0/b0790934c04dfc8d658d4a62acb8f7ca0efdf3818456fcad757b11c6479d/coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19", size = 210243 }, + { url = "https://files.pythonhosted.org/packages/7d/e7/9291de916d084f41adddfd4b82246e68d61d6a75747f075f7e64628998d2/coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2", size = 207013 }, + { url = "https://files.pythonhosted.org/packages/27/03/932c2c5717a7fa80cd43c6a07d3177076d97b79f12f40f882f9916db0063/coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117", size = 207251 }, + { url = "https://files.pythonhosted.org/packages/d5/3f/0af47dcb9327f65a45455fbca846fe96eb57c153af46c4754a3ba678938a/coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613", size = 240268 }, + { url = "https://files.pythonhosted.org/packages/8a/3c/37a9d81bbd4b23bc7d46ca820e16174c613579c66342faa390a271d2e18b/coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27", size = 237298 }, + { url = "https://files.pythonhosted.org/packages/c0/70/6b0627e5bd68204ee580126ed3513140b2298995c1233bd67404b4e44d0e/coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52", size = 239367 }, + { url = "https://files.pythonhosted.org/packages/3c/eb/634d7dfab24ac3b790bebaf9da0f4a5352cbc125ce6a9d5c6cf4c6cae3c7/coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2", size = 238853 }, + { url = "https://files.pythonhosted.org/packages/d9/0d/8e3ed00f1266ef7472a4e33458f42e39492e01a64281084fb3043553d3f1/coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1", size = 237160 }, + { url = "https://files.pythonhosted.org/packages/ce/9c/4337f468ef0ab7a2e0887a9c9da0e58e2eada6fc6cbee637a4acd5dfd8a9/coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5", size = 238824 }, + { url = "https://files.pythonhosted.org/packages/5e/09/3e94912b8dd37251377bb02727a33a67ee96b84bbbe092f132b401ca5dd9/coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17", size = 209639 }, + { url = "https://files.pythonhosted.org/packages/01/69/d4f3a4101171f32bc5b3caec8ff94c2c60f700107a6aaef7244b2c166793/coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08", size = 210428 }, + { url = "https://files.pythonhosted.org/packages/c2/4d/2dede4f7cb5a70fb0bb40a57627fddf1dbdc6b9c1db81f7c4dcdcb19e2f4/coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9", size = 207039 }, + { url = "https://files.pythonhosted.org/packages/3f/f9/d86368ae8c79e28f1fb458ebc76ae9ff3e8bd8069adc24e8f2fed03c58b7/coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba", size = 207298 }, + { url = "https://files.pythonhosted.org/packages/64/c5/b4cc3c3f64622c58fbfd4d8b9a7a8ce9d355f172f91fcabbba1f026852f6/coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c", size = 239813 }, + { url = "https://files.pythonhosted.org/packages/8a/86/14c42e60b70a79b26099e4d289ccdfefbc68624d096f4481163085aa614c/coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06", size = 236959 }, + { url = "https://files.pythonhosted.org/packages/7f/f8/4436a643631a2fbab4b44d54f515028f6099bfb1cd95b13cfbf701e7f2f2/coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f", size = 238950 }, + { url = "https://files.pythonhosted.org/packages/49/50/1571810ddd01f99a0a8be464a4ac8b147f322cd1e8e296a1528984fc560b/coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b", size = 238610 }, + { url = "https://files.pythonhosted.org/packages/f3/8c/6312d241fe7cbd1f0cade34a62fea6f333d1a261255d76b9a87074d8703c/coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21", size = 236697 }, + { url = "https://files.pythonhosted.org/packages/ce/5f/fef33dfd05d87ee9030f614c857deb6df6556b8f6a1c51bbbb41e24ee5ac/coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a", size = 238541 }, + { url = "https://files.pythonhosted.org/packages/a9/64/6a984b6e92e1ea1353b7ffa08e27f707a5e29b044622445859200f541e8c/coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e", size = 209707 }, + { url = "https://files.pythonhosted.org/packages/5c/60/ce5a9e942e9543783b3db5d942e0578b391c25cdd5e7f342d854ea83d6b7/coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963", size = 210439 }, + { url = 
"https://files.pythonhosted.org/packages/78/53/6719677e92c308207e7f10561a1b16ab8b5c00e9328efc9af7cfd6fb703e/coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f", size = 207784 }, + { url = "https://files.pythonhosted.org/packages/fa/dd/7054928930671fcb39ae6a83bb71d9ab5f0afb733172543ced4b09a115ca/coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806", size = 208058 }, + { url = "https://files.pythonhosted.org/packages/b5/7d/fd656ddc2b38301927b9eb3aae3fe827e7aa82e691923ed43721fd9423c9/coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11", size = 250772 }, + { url = "https://files.pythonhosted.org/packages/90/d0/eb9a3cc2100b83064bb086f18aedde3afffd7de6ead28f69736c00b7f302/coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3", size = 246490 }, + { url = "https://files.pythonhosted.org/packages/45/44/3f64f38f6faab8a0cfd2c6bc6eb4c6daead246b97cf5f8fc23bf3788f841/coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a", size = 248848 }, + { url = "https://files.pythonhosted.org/packages/5d/11/4c465a5f98656821e499f4b4619929bd5a34639c466021740ecdca42aa30/coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc", size = 248340 }, + { url = "https://files.pythonhosted.org/packages/f1/96/ebecda2d016cce9da812f404f720ca5df83c6b29f65dc80d2000d0078741/coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70", size = 246229 }, + { url = "https://files.pythonhosted.org/packages/16/d9/3d820c00066ae55d69e6d0eae11d6149a5ca7546de469ba9d597f01bf2d7/coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef", size = 247510 }, + { url = "https://files.pythonhosted.org/packages/8f/c3/4fa1eb412bb288ff6bfcc163c11700ff06e02c5fad8513817186e460ed43/coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e", size = 210353 }, + { url = "https://files.pythonhosted.org/packages/7e/77/03fc2979d1538884d921c2013075917fc927f41cd8526909852fe4494112/coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1", size = 211502 }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11' and python_full_version >= '3.11'" }, +] + +[[package]] +name = "cryptography" +version = "43.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/05/07b55d1fa21ac18c3a8c79f764e2514e6f6a9698f1be44994f5adf0d29db/cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805", size = 686989 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/1f/f3/01fdf26701a26f4b4dbc337a26883ad5bccaa6f1bbbdd29cd89e22f18a1c/cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e", size = 6225303 }, + { url = "https://files.pythonhosted.org/packages/a3/01/4896f3d1b392025d4fcbecf40fdea92d3df8662123f6835d0af828d148fd/cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e", size = 3760905 }, + { url = "https://files.pythonhosted.org/packages/0a/be/f9a1f673f0ed4b7f6c643164e513dbad28dd4f2dcdf5715004f172ef24b6/cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f", size = 3977271 }, + { url = "https://files.pythonhosted.org/packages/4e/49/80c3a7b5514d1b416d7350830e8c422a4d667b6d9b16a9392ebfd4a5388a/cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6", size = 3746606 }, + { url = "https://files.pythonhosted.org/packages/0e/16/a28ddf78ac6e7e3f25ebcef69ab15c2c6be5ff9743dd0709a69a4f968472/cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18", size = 3986484 }, + { url = "https://files.pythonhosted.org/packages/01/f5/69ae8da70c19864a32b0315049866c4d411cce423ec169993d0434218762/cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd", size = 3852131 }, + { url = "https://files.pythonhosted.org/packages/fd/db/e74911d95c040f9afd3612b1f732e52b3e517cb80de8bf183be0b7d413c6/cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73", size = 4075647 }, + { url = "https://files.pythonhosted.org/packages/56/48/7b6b190f1462818b324e674fa20d1d5ef3e24f2328675b9b16189cbf0b3c/cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2", size = 2623873 }, + { url = "https://files.pythonhosted.org/packages/eb/b1/0ebff61a004f7f89e7b65ca95f2f2375679d43d0290672f7713ee3162aff/cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd", size = 3068039 }, + { url = "https://files.pythonhosted.org/packages/30/d5/c8b32c047e2e81dd172138f772e81d852c51f0f2ad2ae8a24f1122e9e9a7/cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984", size = 6222984 }, + { url = "https://files.pythonhosted.org/packages/2f/78/55356eb9075d0be6e81b59f45c7b48df87f76a20e73893872170471f3ee8/cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5", size = 3762968 }, + { url = "https://files.pythonhosted.org/packages/2a/2c/488776a3dc843f95f86d2f957ca0fc3407d0242b50bede7fad1e339be03f/cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4", size = 3977754 }, + { url = "https://files.pythonhosted.org/packages/7c/04/2345ca92f7a22f601a9c62961741ef7dd0127c39f7310dffa0041c80f16f/cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7", size = 3749458 }, + { url = "https://files.pythonhosted.org/packages/ac/25/e715fa0bc24ac2114ed69da33adf451a38abb6f3f24ec207908112e9ba53/cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405", size = 3988220 }, + { url = "https://files.pythonhosted.org/packages/21/ce/b9c9ff56c7164d8e2edfb6c9305045fbc0df4508ccfdb13ee66eb8c95b0e/cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16", size = 3853898 }, + { url = "https://files.pythonhosted.org/packages/2a/33/b3682992ab2e9476b9c81fff22f02c8b0a1e6e1d49ee1750a67d85fd7ed2/cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73", size = 4076592 }, + { url = "https://files.pythonhosted.org/packages/81/1e/ffcc41b3cebd64ca90b28fd58141c5f68c83d48563c88333ab660e002cd3/cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995", size = 2623145 }, + { url = "https://files.pythonhosted.org/packages/87/5c/3dab83cc4aba1f4b0e733e3f0c3e7d4386440d660ba5b1e3ff995feb734d/cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362", size = 3068026 }, +] + +[[package]] +name = "cumulusci" +version = "4.0.0" +source = { editable = "." } +dependencies = [ + { name = "click" }, + { name = "cryptography" }, + { name = "defusedxml" }, + { name = "docutils" }, + { name = "faker" }, + { name = "fs" }, + { name = "github3-py" }, + { name = "jinja2" }, + { name = "keyring" }, + { name = "lxml" }, + { name = "markupsafe" }, + { name = "psutil" }, + { name = "pydantic" }, + { name = "pyjwt" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "requests-futures" }, + { name = "rich" }, + { name = "robotframework" }, + { name = "robotframework-pabot" }, + { name = "robotframework-requests" }, + { name = "robotframework-seleniumlibrary" }, + { name = "rst2ansi" }, + { name = "salesforce-bulk" }, + { name = "sarge" }, + { name = "selenium" }, + { name = "simple-salesforce" }, + { name = "snowfakery" }, + { name = "sqlalchemy" }, + { name = "xmltodict" }, +] + +[package.dev-dependencies] +dev = [ + { name = "coverage", extra = ["toml"] }, + { name = "factory-boy" }, + { name = "furo" }, + { name = "jsonschema" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "pytest-random-order" }, + { name = "pytest-vcr" }, + { name = "responses" }, + { name = "testfixtures" }, + { name = "tox" }, + { name = "typeguard" }, + { name = "vcrpy" }, +] +docs = [ + { name = "myst-parser" }, + { name = "sphinx" }, +] +lint = [ + { name = "black" }, + { name = "flake8" }, + { name = "isort" }, + { name = "pre-commit" }, +] + +[package.metadata] +requires-dist = [ + { name = "click", specifier = ">=8.1" }, + { name = "cryptography" }, + { name = "defusedxml" }, + { name = "docutils", specifier = ">=0.21.2" }, + { name = "faker" }, + { name = "fs" }, + { name = "github3-py" }, + { name = "jinja2" }, + { name = "keyring", specifier = "<=23.0.1" }, + { name = "lxml" }, + { name = "markupsafe" }, + { name = "psutil" }, + { name = "pydantic", specifier = "<2" }, + { name = "pyjwt" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "pyyaml" }, + { name = "requests" }, + { 
name = "requests-futures" }, + { name = "rich", specifier = ">=13.9.4" }, + { name = "robotframework" }, + { name = "robotframework-pabot" }, + { name = "robotframework-requests" }, + { name = "robotframework-seleniumlibrary", specifier = "<6" }, + { name = "rst2ansi", specifier = ">=0.1.5" }, + { name = "salesforce-bulk" }, + { name = "sarge" }, + { name = "selenium", specifier = "<4" }, + { name = "simple-salesforce", specifier = "==1.11.4" }, + { name = "snowfakery", specifier = ">=4.0.0" }, + { name = "sqlalchemy", specifier = "<2" }, + { name = "xmltodict" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "coverage", extras = ["toml"], specifier = ">=7.6.1" }, + { name = "factory-boy", specifier = ">=3.3.1" }, + { name = "furo", specifier = ">=2023.3.27" }, + { name = "jsonschema", specifier = ">=4.23.0" }, + { name = "pytest", specifier = ">=7.0.1" }, + { name = "pytest-cov", specifier = ">=5.0.0" }, + { name = "pytest-random-order", specifier = ">=1.1.1" }, + { name = "pytest-vcr", specifier = ">=1.0.2" }, + { name = "responses", specifier = ">=0.23.1" }, + { name = "testfixtures", specifier = ">=8.3.0" }, + { name = "tox", specifier = ">=4.20.0" }, + { name = "typeguard", specifier = "<=2.13.3" }, + { name = "vcrpy", specifier = ">=6.0.2" }, +] +docs = [ + { name = "myst-parser", specifier = ">=1.0.0" }, + { name = "sphinx", specifier = ">=5.3.0" }, +] +lint = [ + { name = "black", specifier = ">=24.8.0" }, + { name = "flake8", specifier = "<4" }, + { name = "isort", specifier = ">=5.13.2" }, + { name = "pre-commit", specifier = ">=3.5.0" }, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604 }, +] + +[[package]] +name = "distlib" +version = "0.3.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408 }, +] + +[[package]] +name = "factory-boy" +version = "3.3.1" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "faker" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/3d/8070dde623341401b1c80156583d4c793058fe250450178218bb6e45526c/factory_boy-3.3.1.tar.gz", hash = "sha256:8317aa5289cdfc45f9cae570feb07a6177316c82e34d14df3c2e1f22f26abef0", size = 163924 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/cf/44ec67152f3129d0114c1499dd34f0a0a0faf43d9c2af05bc535746ca482/factory_boy-3.3.1-py2.py3-none-any.whl", hash = "sha256:7b1113c49736e1e9995bc2a18f4dbf2c52cf0f841103517010b1d825712ce3ca", size = 36878 }, +] + +[[package]] +name = "faker" +version = "32.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/2a/dd2c8f55d69013d0eee30ec4c998250fb7da957f5fe860ed077b3df1725b/faker-32.1.0.tar.gz", hash = "sha256:aac536ba04e6b7beb2332c67df78485fc29c1880ff723beac6d1efd45e2f10f5", size = 1850193 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/fa/4a82dea32d6262a96e6841cdd4a45c11ac09eecdff018e745565410ac70e/Faker-32.1.0-py3-none-any.whl", hash = "sha256:c77522577863c264bdc9dad3a2a750ad3f7ee43ff8185072e482992288898814", size = 1889123 }, +] + +[[package]] +name = "faker-edu" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "faker" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/a1/e202e07a03e0c6a0e96ba054cb7bccc29f053c0e4f61e55a42905c6cab40/faker-edu-1.1.0.tar.gz", hash = "sha256:4f2117a969b42a0adf99ececdfebf2fb65066ea6fe49a1eb01a1168c32ff5485", size = 6133 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/c2/4c621cc8132e67db86fe145ff13685ecc4620b601430b13bce145de4595e/faker_edu-1.1.0-py3-none-any.whl", hash = "sha256:1f0b025d5b66273ae663d88837d7c2616ce1f48289c74ecc2aee749a6693754e", size = 6723 }, +] + +[[package]] +name = "faker-nonprofit" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "faker" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6b/9e/16412dd623985d8d3cb8db22e05d6764d61795ee86af5dff0654f4f69499/faker-nonprofit-1.0.0.tar.gz", hash = "sha256:bcadd173a185ae8fb9dd184010cd55c9ebac034ea893f40d51beb5be93216983", size = 3392 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/3e/e338da76206031aa2d9c187f7929554991edf0153bc12a2e5f6457a93462/faker_nonprofit-1.0.0-py3-none-any.whl", hash = "sha256:ba98ae0a05bc139941db34853e4f6880a480b679e2e0d59f7b9bd18a540a6232", size = 4009 }, +] + +[[package]] +name = "filelock" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, +] + +[[package]] +name = "flake8" +version = "3.9.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mccabe" }, + { name = "pycodestyle" }, + { name = "pyflakes" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/9e/47/15b267dfe7e03dca4c4c06e7eadbd55ef4dfd368b13a0bab36d708b14366/flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b", size = 164777 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/80/35a0716e5d5101e643404dabd20f07f5528a21f3ef4032d31a49c913237b/flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907", size = 73147 }, +] + +[[package]] +name = "fs" +version = "2.4.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "appdirs" }, + { name = "setuptools" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5d/a9/af5bfd5a92592c16cdae5c04f68187a309be8a146b528eac3c6e30edbad2/fs-2.4.16.tar.gz", hash = "sha256:ae97c7d51213f4b70b6a958292530289090de3a7e15841e108fbe144f069d313", size = 187441 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/5c/a3d95dc1ec6cdeb032d789b552ecc76effa3557ea9186e1566df6aac18df/fs-2.4.16-py2.py3-none-any.whl", hash = "sha256:660064febbccda264ae0b6bace80a8d1be9e089e0a5eb2427b7d517f9a91545c", size = 135261 }, +] + +[[package]] +name = "furo" +version = "2024.8.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beautifulsoup4" }, + { name = "pygments" }, + { name = "sphinx" }, + { name = "sphinx-basic-ng" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/e2/d351d69a9a9e4badb4a5be062c2d0e87bd9e6c23b5e57337fef14bef34c8/furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01", size = 1661506 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/48/e791a7ed487dbb9729ef32bb5d1af16693d8925f4366befef54119b2e576/furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c", size = 341333 }, +] + +[[package]] +name = "github3-py" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyjwt", extra = ["crypto"] }, + { name = "python-dateutil" }, + { name = "requests" }, + { name = "uritemplate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/91/603bcaf8cd1b3927de64bf56c3a8915f6653ea7281919140c5bcff2bfe7b/github3.py-4.0.1.tar.gz", hash = "sha256:30d571076753efc389edc7f9aaef338a4fcb24b54d8968d5f39b1342f45ddd36", size = 36214038 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/2394d4fb542574678b0ba342daf734d4d811768da3c2ee0c84d509dcb26c/github3.py-4.0.1-py3-none-any.whl", hash = "sha256:a89af7de25650612d1da2f0609622bcdeb07ee8a45a1c06b2d16a05e4234e753", size = 151800 }, +] + +[[package]] +name = "greenlet" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/ff/df5fede753cc10f6a5be0931204ea30c35fa2f2ea7a35b25bdaf4fe40e46/greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", size = 186022 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/62/1c2665558618553c42922ed47a4e6d6527e2fa3516a8256c2f431c5d0441/greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70", size = 272479 }, + { url = "https://files.pythonhosted.org/packages/76/9d/421e2d5f07285b6e4e3a676b016ca781f63cfe4a0cd8eaecf3fd6f7a71ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159", size = 640404 }, + { url = "https://files.pythonhosted.org/packages/e5/de/6e05f5c59262a584e502dd3d261bbdd2c97ab5416cc9c0b91ea38932a901/greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e", size = 652813 }, + { url = "https://files.pythonhosted.org/packages/49/93/d5f93c84241acdea15a8fd329362c2c71c79e1a507c3f142a5d67ea435ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1", size = 648517 }, + { url = "https://files.pythonhosted.org/packages/15/85/72f77fc02d00470c86a5c982b8daafdf65d38aefbbe441cebff3bf7037fc/greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383", size = 647831 }, + { url = "https://files.pythonhosted.org/packages/f7/4b/1c9695aa24f808e156c8f4813f685d975ca73c000c2a5056c514c64980f6/greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a", size = 602413 }, + { url = "https://files.pythonhosted.org/packages/76/70/ad6e5b31ef330f03b12559d19fda2606a522d3849cde46b24f223d6d1619/greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511", size = 1129619 }, + { url = "https://files.pythonhosted.org/packages/f4/fb/201e1b932e584066e0f0658b538e73c459b34d44b4bd4034f682423bc801/greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395", size = 1155198 }, + { url = "https://files.pythonhosted.org/packages/12/da/b9ed5e310bb8b89661b80cbcd4db5a067903bbcd7fc854923f5ebb4144f0/greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39", size = 298930 }, + { url = "https://files.pythonhosted.org/packages/7d/ec/bad1ac26764d26aa1353216fcbfa4670050f66d445448aafa227f8b16e80/greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", size = 274260 }, + { url = "https://files.pythonhosted.org/packages/66/d4/c8c04958870f482459ab5956c2942c4ec35cac7fe245527f1039837c17a9/greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", size = 649064 }, + { url = "https://files.pythonhosted.org/packages/51/41/467b12a8c7c1303d20abcca145db2be4e6cd50a951fa30af48b6ec607581/greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", size = 663420 }, + { url = "https://files.pythonhosted.org/packages/27/8f/2a93cd9b1e7107d5c7b3b7816eeadcac2ebcaf6d6513df9abaf0334777f6/greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", size = 658035 }, + { url = "https://files.pythonhosted.org/packages/57/5c/7c6f50cb12be092e1dccb2599be5a942c3416dbcfb76efcf54b3f8be4d8d/greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", size = 660105 }, + { url = 
"https://files.pythonhosted.org/packages/f1/66/033e58a50fd9ec9df00a8671c74f1f3a320564c6415a4ed82a1c651654ba/greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", size = 613077 }, + { url = "https://files.pythonhosted.org/packages/19/c5/36384a06f748044d06bdd8776e231fadf92fc896bd12cb1c9f5a1bda9578/greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", size = 1135975 }, + { url = "https://files.pythonhosted.org/packages/38/f9/c0a0eb61bdf808d23266ecf1d63309f0e1471f284300ce6dac0ae1231881/greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", size = 1163955 }, + { url = "https://files.pythonhosted.org/packages/43/21/a5d9df1d21514883333fc86584c07c2b49ba7c602e670b174bd73cfc9c7f/greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", size = 299655 }, + { url = "https://files.pythonhosted.org/packages/f3/57/0db4940cd7bb461365ca8d6fd53e68254c9dbbcc2b452e69d0d41f10a85e/greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", size = 272990 }, + { url = "https://files.pythonhosted.org/packages/1c/ec/423d113c9f74e5e402e175b157203e9102feeb7088cee844d735b28ef963/greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", size = 649175 }, + { url = "https://files.pythonhosted.org/packages/a9/46/ddbd2db9ff209186b7b7c621d1432e2f21714adc988703dbdd0e65155c77/greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", size = 663425 }, + { url = "https://files.pythonhosted.org/packages/bc/f9/9c82d6b2b04aa37e38e74f0c429aece5eeb02bab6e3b98e7db89b23d94c6/greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", size = 657736 }, + { url = "https://files.pythonhosted.org/packages/d9/42/b87bc2a81e3a62c3de2b0d550bf91a86939442b7ff85abb94eec3fc0e6aa/greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", size = 660347 }, + { url = "https://files.pythonhosted.org/packages/37/fa/71599c3fd06336cdc3eac52e6871cfebab4d9d70674a9a9e7a482c318e99/greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", size = 615583 }, + { url = "https://files.pythonhosted.org/packages/4e/96/e9ef85de031703ee7a4483489b40cf307f93c1824a02e903106f2ea315fe/greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", size = 1133039 }, + { url = "https://files.pythonhosted.org/packages/87/76/b2b6362accd69f2d1889db61a18c94bc743e961e3cab344c2effaa4b4a25/greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", size = 1160716 }, + { url = "https://files.pythonhosted.org/packages/1f/1b/54336d876186920e185066d8c3024ad55f21d7cc3683c856127ddb7b13ce/greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", size = 299490 }, + { url = "https://files.pythonhosted.org/packages/5f/17/bea55bf36990e1638a2af5ba10c1640273ef20f627962cf97107f1e5d637/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", size = 643731 }, + { url = "https://files.pythonhosted.org/packages/78/d2/aa3d2157f9ab742a08e0fd8f77d4699f37c22adfbfeb0c610a186b5f75e0/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", size = 649304 }, + { url = "https://files.pythonhosted.org/packages/f1/8e/d0aeffe69e53ccff5a28fa86f07ad1d2d2d6537a9506229431a2a02e2f15/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", size = 646537 }, + { url = "https://files.pythonhosted.org/packages/05/79/e15408220bbb989469c8871062c97c6c9136770657ba779711b90870d867/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", size = 642506 }, + { url = "https://files.pythonhosted.org/packages/18/87/470e01a940307796f1d25f8167b551a968540fbe0551c0ebb853cb527dd6/greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", size = 602753 }, + { url = "https://files.pythonhosted.org/packages/e2/72/576815ba674eddc3c25028238f74d7b8068902b3968cbe456771b166455e/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", size = 1122731 }, + { url = "https://files.pythonhosted.org/packages/ac/38/08cc303ddddc4b3d7c628c3039a61a3aae36c241ed01393d00c2fd663473/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", size = 1142112 }, +] + +[[package]] +name = "gvgen" +version = "1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/59/bf1fee74afaa055ae999d899369eb3278b55e3503be281a2f2cdf8ae6824/GvGen-1.0.tar.gz", hash = "sha256:e8d2ae8e042a6a96150e814f57402d142aa768943d827443409acf925ee756d2", size = 8636 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/74/5f2a4ddcc45dc0682f9c331ceb4f5f53fcaaa0aa6898edd9938827d15783/GvGen-1.0-py3-none-any.whl", hash = "sha256:6b84f00c9cd55298248d8b24a7f4f97af14f3896984dee07df391e47aac20079", size = 7997 }, +] + +[[package]] +name = "identify" +version = "2.6.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/79/7a520fc5011e02ca3f3285b5f6820eaf80443eb73e3733f73c02fb42ba0b/identify-2.6.2.tar.gz", hash = "sha256:fab5c716c24d7a789775228823797296a2994b075fb6080ac83a102772a98cbd", size = 99113 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/86/c4395700f3c5475424fb5c41e20c16be28d10c904aee4d005ba3217fc8e7/identify-2.6.2-py2.py3-none-any.whl", hash = "sha256:c097384259f49e372f4ea00a19719d95ae27dd5ff0fd77ad630aa891306b82f3", size = 98982 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769 }, +] + +[[package]] +name = "importlib-metadata" +version = "8.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "isort" +version = "5.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/f9/c1eb8635a24e87ade2efce21e3ce8cd6b8630bb685ddc9cdaca1349b2eb5/isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109", size = 175303 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/b3/8def84f539e7d2289a02f0524b944b15d7c75dab7628bedf1c4f0992029c/isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6", size = 92310 }, +] + +[[package]] +name = "jeepney" +version = "0.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/f4/154cf374c2daf2020e05c3c6a03c91348d59b23c5366e968feb198306fdf/jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806", size = 106005 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/72/2a1e2290f1ab1e06f71f3d0f1646c9e4634e70e1d37491535e19266e8dc9/jeepney-0.8.0-py3-none-any.whl", hash = 
"sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755", size = 48435 }, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, +] + +[[package]] +name = "jsonschema" +version = "4.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/2e/03362ee4034a4c917f697890ccd4aec0800ccf9ded7f511971c75451deec/jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", size = 325778 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/4a/4f9dbeb84e8850557c02365a0eee0649abe5eb1d84af92a25731c6c0f922/jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566", size = 88462 }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2024.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/db/58f950c996c793472e336ff3655b13fbcf1e3b359dcf52dcf3ed3b52c352/jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272", size = 15561 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/0f/8910b19ac0670a0f80ce1008e5e751c4a57e14d2c4c13a482aa6079fa9d6/jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf", size = 18459 }, +] + +[[package]] +name = "keyring" +version = "23.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "jeepney", marker = "sys_platform == 'linux'" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, + { name = "secretstorage", marker = "sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b0/b5/b27458e1d2adf2a11c6e95c67ac63f828e96fe7e166132e5dacbe03e88c0/keyring-23.0.1.tar.gz", hash = "sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8", size = 59185 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/f9/41230ac47f738f1ba66676dc8d3b30ca5b1f9eb0230fc204bcd9836c4ae9/keyring-23.0.1-py3-none-any.whl", hash = "sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48", size = 33013 }, +] + +[[package]] +name = "lxml" +version = "5.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/6b/20c3a4b24751377aaa6307eb230b66701024012c29dd374999cc92983269/lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f", size = 3679318 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/5c/a8/449faa2a3cbe6a99f8d38dcd51a3ee8844c17862841a6f769ea7c2a9cd0f/lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b", size = 8141056 }, + { url = "https://files.pythonhosted.org/packages/ac/8a/ae6325e994e2052de92f894363b038351c50ee38749d30cc6b6d96aaf90f/lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18", size = 4425238 }, + { url = "https://files.pythonhosted.org/packages/f8/fb/128dddb7f9086236bce0eeae2bfb316d138b49b159f50bc681d56c1bdd19/lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442", size = 5095197 }, + { url = "https://files.pythonhosted.org/packages/b4/f9/a181a8ef106e41e3086629c8bdb2d21a942f14c84a0e77452c22d6b22091/lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4", size = 4809809 }, + { url = "https://files.pythonhosted.org/packages/25/2f/b20565e808f7f6868aacea48ddcdd7e9e9fb4c799287f21f1a6c7c2e8b71/lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f", size = 5407593 }, + { url = "https://files.pythonhosted.org/packages/23/0e/caac672ec246d3189a16c4d364ed4f7d6bf856c080215382c06764058c08/lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e", size = 4866657 }, + { url = "https://files.pythonhosted.org/packages/67/a4/1f5fbd3f58d4069000522196b0b776a014f3feec1796da03e495cf23532d/lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c", size = 4967017 }, + { url = "https://files.pythonhosted.org/packages/ee/73/623ecea6ca3c530dd0a4ed0d00d9702e0e85cd5624e2d5b93b005fe00abd/lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16", size = 4810730 }, + { url = "https://files.pythonhosted.org/packages/1d/ce/fb84fb8e3c298f3a245ae3ea6221c2426f1bbaa82d10a88787412a498145/lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79", size = 5455154 }, + { url = "https://files.pythonhosted.org/packages/b1/72/4d1ad363748a72c7c0411c28be2b0dc7150d91e823eadad3b91a4514cbea/lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080", size = 4969416 }, + { url = "https://files.pythonhosted.org/packages/42/07/b29571a58a3a80681722ea8ed0ba569211d9bb8531ad49b5cacf6d409185/lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654", size = 5013672 }, + { url = "https://files.pythonhosted.org/packages/b9/93/bde740d5a58cf04cbd38e3dd93ad1e36c2f95553bbf7d57807bc6815d926/lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d", size = 4878644 }, + { url = "https://files.pythonhosted.org/packages/56/b5/645c8c02721d49927c93181de4017164ec0e141413577687c3df8ff0800f/lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763", size = 5511531 }, + { url = "https://files.pythonhosted.org/packages/85/3f/6a99a12d9438316f4fc86ef88c5d4c8fb674247b17f3173ecadd8346b671/lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec", size = 5402065 }, + { url = "https://files.pythonhosted.org/packages/80/8a/df47bff6ad5ac57335bf552babfb2408f9eb680c074ec1ba412a1a6af2c5/lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be", size = 5069775 }, + { url = "https://files.pythonhosted.org/packages/08/ae/e7ad0f0fbe4b6368c5ee1e3ef0c3365098d806d42379c46c1ba2802a52f7/lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9", size = 3474226 }, + { url = "https://files.pythonhosted.org/packages/c3/b5/91c2249bfac02ee514ab135e9304b89d55967be7e53e94a879b74eec7a5c/lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1", size = 3814971 }, + { url = "https://files.pythonhosted.org/packages/eb/6d/d1f1c5e40c64bf62afd7a3f9b34ce18a586a1cccbf71e783cd0a6d8e8971/lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859", size = 8171753 }, + { url = "https://files.pythonhosted.org/packages/bd/83/26b1864921869784355459f374896dcf8b44d4af3b15d7697e9156cb2de9/lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e", size = 4441955 }, + { url = "https://files.pythonhosted.org/packages/e0/d2/e9bff9fb359226c25cda3538f664f54f2804f4b37b0d7c944639e1a51f69/lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f", size = 5050778 }, + { url = "https://files.pythonhosted.org/packages/88/69/6972bfafa8cd3ddc8562b126dd607011e218e17be313a8b1b9cc5a0ee876/lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e", size = 4748628 }, + { url = "https://files.pythonhosted.org/packages/5d/ea/a6523c7c7f6dc755a6eed3d2f6d6646617cad4d3d6d8ce4ed71bfd2362c8/lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179", size = 5322215 }, + { url = "https://files.pythonhosted.org/packages/99/37/396fbd24a70f62b31d988e4500f2068c7f3fd399d2fd45257d13eab51a6f/lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a", size = 4813963 }, + { url = "https://files.pythonhosted.org/packages/09/91/e6136f17459a11ce1757df864b213efbeab7adcb2efa63efb1b846ab6723/lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3", size = 4923353 }, + { url = "https://files.pythonhosted.org/packages/1d/7c/2eeecf87c9a1fca4f84f991067c693e67340f2b7127fc3eca8fa29d75ee3/lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1", size = 4740541 }, + { url = 
"https://files.pythonhosted.org/packages/3b/ed/4c38ba58defca84f5f0d0ac2480fdcd99fc7ae4b28fc417c93640a6949ae/lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d", size = 5346504 }, + { url = "https://files.pythonhosted.org/packages/a5/22/bbd3995437e5745cb4c2b5d89088d70ab19d4feabf8a27a24cecb9745464/lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c", size = 4898077 }, + { url = "https://files.pythonhosted.org/packages/0a/6e/94537acfb5b8f18235d13186d247bca478fea5e87d224644e0fe907df976/lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99", size = 4946543 }, + { url = "https://files.pythonhosted.org/packages/8d/e8/4b15df533fe8e8d53363b23a41df9be907330e1fa28c7ca36893fad338ee/lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff", size = 4816841 }, + { url = "https://files.pythonhosted.org/packages/1a/e7/03f390ea37d1acda50bc538feb5b2bda6745b25731e4e76ab48fae7106bf/lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a", size = 5417341 }, + { url = "https://files.pythonhosted.org/packages/ea/99/d1133ab4c250da85a883c3b60249d3d3e7c64f24faff494cf0fd23f91e80/lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8", size = 5327539 }, + { url = "https://files.pythonhosted.org/packages/7d/ed/e6276c8d9668028213df01f598f385b05b55a4e1b4662ee12ef05dab35aa/lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d", size = 5012542 }, + { url = "https://files.pythonhosted.org/packages/36/88/684d4e800f5aa28df2a991a6a622783fb73cf0e46235cfa690f9776f032e/lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30", size = 3486454 }, + { url = "https://files.pythonhosted.org/packages/fc/82/ace5a5676051e60355bd8fb945df7b1ba4f4fb8447f2010fb816bfd57724/lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f", size = 3816857 }, + { url = "https://files.pythonhosted.org/packages/94/6a/42141e4d373903bfea6f8e94b2f554d05506dfda522ada5343c651410dc8/lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a", size = 8156284 }, + { url = "https://files.pythonhosted.org/packages/91/5e/fa097f0f7d8b3d113fb7312c6308af702f2667f22644441715be961f2c7e/lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd", size = 4432407 }, + { url = "https://files.pythonhosted.org/packages/2d/a1/b901988aa6d4ff937f2e5cfc114e4ec561901ff00660c3e56713642728da/lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51", size = 5048331 }, + { url = "https://files.pythonhosted.org/packages/30/0f/b2a54f48e52de578b71bbe2a2f8160672a8a5e103df3a78da53907e8c7ed/lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b", size = 4744835 }, + { url = 
"https://files.pythonhosted.org/packages/82/9d/b000c15538b60934589e83826ecbc437a1586488d7c13f8ee5ff1f79a9b8/lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002", size = 5316649 }, + { url = "https://files.pythonhosted.org/packages/e3/ee/ffbb9eaff5e541922611d2c56b175c45893d1c0b8b11e5a497708a6a3b3b/lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4", size = 4812046 }, + { url = "https://files.pythonhosted.org/packages/15/ff/7ff89d567485c7b943cdac316087f16b2399a8b997007ed352a1248397e5/lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492", size = 4918597 }, + { url = "https://files.pythonhosted.org/packages/c6/a3/535b6ed8c048412ff51268bdf4bf1cf052a37aa7e31d2e6518038a883b29/lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3", size = 4738071 }, + { url = "https://files.pythonhosted.org/packages/7a/8f/cbbfa59cb4d4fd677fe183725a76d8c956495d7a3c7f111ab8f5e13d2e83/lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4", size = 5342213 }, + { url = "https://files.pythonhosted.org/packages/5c/fb/db4c10dd9958d4b52e34d1d1f7c1f434422aeaf6ae2bbaaff2264351d944/lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367", size = 4893749 }, + { url = "https://files.pythonhosted.org/packages/f2/38/bb4581c143957c47740de18a3281a0cab7722390a77cc6e610e8ebf2d736/lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832", size = 4945901 }, + { url = "https://files.pythonhosted.org/packages/fc/d5/18b7de4960c731e98037bd48fa9f8e6e8f2558e6fbca4303d9b14d21ef3b/lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff", size = 4815447 }, + { url = "https://files.pythonhosted.org/packages/97/a8/cd51ceaad6eb849246559a8ef60ae55065a3df550fc5fcd27014361c1bab/lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd", size = 5411186 }, + { url = "https://files.pythonhosted.org/packages/89/c3/1e3dabab519481ed7b1fdcba21dcfb8832f57000733ef0e71cf6d09a5e03/lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb", size = 5324481 }, + { url = "https://files.pythonhosted.org/packages/b6/17/71e9984cf0570cd202ac0a1c9ed5c1b8889b0fc8dc736f5ef0ffb181c284/lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b", size = 5011053 }, + { url = "https://files.pythonhosted.org/packages/69/68/9f7e6d3312a91e30829368c2b3217e750adef12a6f8eb10498249f4e8d72/lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957", size = 3485634 }, + { url = "https://files.pythonhosted.org/packages/7d/db/214290d58ad68c587bd5d6af3d34e56830438733d0d0856c0275fde43652/lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d", size = 3814417 }, +] + +[[package]] +name = 
"markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, + { 
url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, + { url = 
"https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, +] + +[[package]] +name = "mccabe" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/18/fa675aa501e11d6d6ca0ae73a101b2f3571a565e0f7d38e062eec18a91ee/mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f", size = 8612 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/89/479dc97e18549e21354893e4ee4ef36db1d237534982482c3681ee6e7b57/mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", size = 8556 }, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5", size = 43542 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", size = 55316 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 
}, +] + +[[package]] +name = "multidict" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/be/504b89a5e9ca731cd47487e91c469064f8ae5af93b7259758dcfc2b9c848/multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a", size = 64002 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/13/df3505a46d0cd08428e4c8169a196131d1b0c4b515c3649829258843dde6/multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6", size = 48570 }, + { url = "https://files.pythonhosted.org/packages/f0/e1/a215908bfae1343cdb72f805366592bdd60487b4232d039c437fe8f5013d/multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156", size = 29316 }, + { url = "https://files.pythonhosted.org/packages/70/0f/6dc70ddf5d442702ed74f298d69977f904960b82368532c88e854b79f72b/multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb", size = 29640 }, + { url = "https://files.pythonhosted.org/packages/d8/6d/9c87b73a13d1cdea30b321ef4b3824449866bd7f7127eceed066ccb9b9ff/multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b", size = 131067 }, + { url = "https://files.pythonhosted.org/packages/cc/1e/1b34154fef373371fd6c65125b3d42ff5f56c7ccc6bfff91b9b3c60ae9e0/multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72", size = 138507 }, + { url = "https://files.pythonhosted.org/packages/fb/e0/0bc6b2bac6e461822b5f575eae85da6aae76d0e2a79b6665d6206b8e2e48/multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304", size = 133905 }, + { url = "https://files.pythonhosted.org/packages/ba/af/73d13b918071ff9b2205fcf773d316e0f8fefb4ec65354bbcf0b10908cc6/multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351", size = 129004 }, + { url = "https://files.pythonhosted.org/packages/74/21/23960627b00ed39643302d81bcda44c9444ebcdc04ee5bedd0757513f259/multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb", size = 121308 }, + { url = "https://files.pythonhosted.org/packages/8b/5c/cf282263ffce4a596ed0bb2aa1a1dddfe1996d6a62d08842a8d4b33dca13/multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3", size = 132608 }, + { url = "https://files.pythonhosted.org/packages/d7/3e/97e778c041c72063f42b290888daff008d3ab1427f5b09b714f5a8eff294/multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399", size = 127029 }, + { url = "https://files.pythonhosted.org/packages/47/ac/3efb7bfe2f3aefcf8d103e9a7162572f01936155ab2f7ebcc7c255a23212/multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423", size = 137594 }, + { url = 
"https://files.pythonhosted.org/packages/42/9b/6c6e9e8dc4f915fc90a9b7798c44a30773dea2995fdcb619870e705afe2b/multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3", size = 134556 }, + { url = "https://files.pythonhosted.org/packages/1d/10/8e881743b26aaf718379a14ac58572a240e8293a1c9d68e1418fb11c0f90/multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753", size = 130993 }, + { url = "https://files.pythonhosted.org/packages/45/84/3eb91b4b557442802d058a7579e864b329968c8d0ea57d907e7023c677f2/multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80", size = 26405 }, + { url = "https://files.pythonhosted.org/packages/9f/0b/ad879847ecbf6d27e90a6eabb7eff6b62c129eefe617ea45eae7c1f0aead/multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926", size = 28795 }, + { url = "https://files.pythonhosted.org/packages/fd/16/92057c74ba3b96d5e211b553895cd6dc7cc4d1e43d9ab8fafc727681ef71/multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa", size = 48713 }, + { url = "https://files.pythonhosted.org/packages/94/3d/37d1b8893ae79716179540b89fc6a0ee56b4a65fcc0d63535c6f5d96f217/multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436", size = 29516 }, + { url = "https://files.pythonhosted.org/packages/a2/12/adb6b3200c363062f805275b4c1e656be2b3681aada66c80129932ff0bae/multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761", size = 29557 }, + { url = "https://files.pythonhosted.org/packages/47/e9/604bb05e6e5bce1e6a5cf80a474e0f072e80d8ac105f1b994a53e0b28c42/multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e", size = 130170 }, + { url = "https://files.pythonhosted.org/packages/7e/13/9efa50801785eccbf7086b3c83b71a4fb501a4d43549c2f2f80b8787d69f/multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef", size = 134836 }, + { url = "https://files.pythonhosted.org/packages/bf/0f/93808b765192780d117814a6dfcc2e75de6dcc610009ad408b8814dca3ba/multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95", size = 133475 }, + { url = "https://files.pythonhosted.org/packages/d3/c8/529101d7176fe7dfe1d99604e48d69c5dfdcadb4f06561f465c8ef12b4df/multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925", size = 131049 }, + { url = "https://files.pythonhosted.org/packages/ca/0c/fc85b439014d5a58063e19c3a158a889deec399d47b5269a0f3b6a2e28bc/multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966", size = 120370 }, + { url = "https://files.pythonhosted.org/packages/db/46/d4416eb20176492d2258fbd47b4abe729ff3b6e9c829ea4236f93c865089/multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305", size = 125178 }, + { url = "https://files.pythonhosted.org/packages/5b/46/73697ad7ec521df7de5531a32780bbfd908ded0643cbe457f981a701457c/multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2", size = 119567 }, + { url = "https://files.pythonhosted.org/packages/cd/ed/51f060e2cb0e7635329fa6ff930aa5cffa17f4c7f5c6c3ddc3500708e2f2/multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2", size = 129822 }, + { url = "https://files.pythonhosted.org/packages/df/9e/ee7d1954b1331da3eddea0c4e08d9142da5f14b1321c7301f5014f49d492/multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6", size = 128656 }, + { url = "https://files.pythonhosted.org/packages/77/00/8538f11e3356b5d95fa4b024aa566cde7a38aa7a5f08f4912b32a037c5dc/multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3", size = 125360 }, + { url = "https://files.pythonhosted.org/packages/be/05/5d334c1f2462d43fec2363cd00b1c44c93a78c3925d952e9a71caf662e96/multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133", size = 26382 }, + { url = "https://files.pythonhosted.org/packages/a3/bf/f332a13486b1ed0496d624bcc7e8357bb8053823e8cd4b9a18edc1d97e73/multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1", size = 28529 }, + { url = "https://files.pythonhosted.org/packages/22/67/1c7c0f39fe069aa4e5d794f323be24bf4d33d62d2a348acdb7991f8f30db/multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008", size = 48771 }, + { url = "https://files.pythonhosted.org/packages/3c/25/c186ee7b212bdf0df2519eacfb1981a017bda34392c67542c274651daf23/multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f", size = 29533 }, + { url = "https://files.pythonhosted.org/packages/67/5e/04575fd837e0958e324ca035b339cea174554f6f641d3fb2b4f2e7ff44a2/multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28", size = 29595 }, + { url = "https://files.pythonhosted.org/packages/d3/b2/e56388f86663810c07cfe4a3c3d87227f3811eeb2d08450b9e5d19d78876/multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b", size = 130094 }, + { url = "https://files.pythonhosted.org/packages/6c/ee/30ae9b4186a644d284543d55d491fbd4239b015d36b23fea43b4c94f7052/multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c", size = 134876 }, + { url = "https://files.pythonhosted.org/packages/84/c7/70461c13ba8ce3c779503c70ec9d0345ae84de04521c1f45a04d5f48943d/multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3", size = 133500 }, + { url = 
"https://files.pythonhosted.org/packages/4a/9f/002af221253f10f99959561123fae676148dd730e2daa2cd053846a58507/multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44", size = 131099 }, + { url = "https://files.pythonhosted.org/packages/82/42/d1c7a7301d52af79d88548a97e297f9d99c961ad76bbe6f67442bb77f097/multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2", size = 120403 }, + { url = "https://files.pythonhosted.org/packages/68/f3/471985c2c7ac707547553e8f37cff5158030d36bdec4414cb825fbaa5327/multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3", size = 125348 }, + { url = "https://files.pythonhosted.org/packages/67/2c/e6df05c77e0e433c214ec1d21ddd203d9a4770a1f2866a8ca40a545869a0/multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa", size = 119673 }, + { url = "https://files.pythonhosted.org/packages/c5/cd/bc8608fff06239c9fb333f9db7743a1b2eafe98c2666c9a196e867a3a0a4/multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa", size = 129927 }, + { url = "https://files.pythonhosted.org/packages/44/8e/281b69b7bc84fc963a44dc6e0bbcc7150e517b91df368a27834299a526ac/multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4", size = 128711 }, + { url = "https://files.pythonhosted.org/packages/12/a4/63e7cd38ed29dd9f1881d5119f272c898ca92536cdb53ffe0843197f6c85/multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6", size = 125519 }, + { url = "https://files.pythonhosted.org/packages/38/e0/4f5855037a72cd8a7a2f60a3952d9aa45feedb37ae7831642102604e8a37/multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81", size = 26426 }, + { url = "https://files.pythonhosted.org/packages/7e/a5/17ee3a4db1e310b7405f5d25834460073a8ccd86198ce044dfaf69eac073/multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774", size = 28531 }, + { url = "https://files.pythonhosted.org/packages/99/b7/b9e70fde2c0f0c9af4cc5277782a89b66d35948ea3369ec9f598358c3ac5/multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506", size = 10051 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "myst-parser" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "jinja2" }, + { name 
= "markdown-it-py" }, + { name = "mdit-py-plugins" }, + { name = "pyyaml" }, + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/55/6d1741a1780e5e65038b74bce6689da15f620261c490c3511eb4c12bac4b/myst_parser-4.0.0.tar.gz", hash = "sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531", size = 93858 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/b4/b036f8fdb667587bb37df29dc6644681dd78b7a2a6321a34684b79412b28/myst_parser-4.0.0-py3-none-any.whl", hash = "sha256:b9317997552424448c6096c2558872fdb6f81d3ecb3a40ce84a7518798f3f28d", size = 84563 }, +] + +[[package]] +name = "natsort" +version = "8.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e2/a9/a0c57aee75f77794adaf35322f8b6404cbd0f89ad45c87197a937764b7d0/natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581", size = 76575 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/82/7a9d0550484a62c6da82858ee9419f3dd1ccc9aa1c26a1e43da3ecd20b0d/natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c", size = 38268 }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, +] + +[[package]] +name = "packaging" +version = "24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "pre-commit" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/c8/e22c292035f1bac8b9f5237a2622305bc0304e776080b246f3df57c4ff9f/pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2", size = 191678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/8f/496e10d51edd6671ebe0432e33ff800aa86775d2d147ce7d43389324a525/pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878", size = 218713 }, +] + +[[package]] +name = "propcache" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/4d/5e5a60b78dbc1d464f8a7bbaeb30957257afdc8512cbb9dfd5659304f5cd/propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70", size = 40951 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/1c/71eec730e12aec6511e702ad0cd73c2872eccb7cad39de8ba3ba9de693ef/propcache-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354", size = 80811 }, + { url = "https://files.pythonhosted.org/packages/89/c3/7e94009f9a4934c48a371632197406a8860b9f08e3f7f7d922ab69e57a41/propcache-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de", size = 46365 }, + { url = "https://files.pythonhosted.org/packages/c0/1d/c700d16d1d6903aeab28372fe9999762f074b80b96a0ccc953175b858743/propcache-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87", size = 45602 }, + { url = "https://files.pythonhosted.org/packages/2e/5e/4a3e96380805bf742712e39a4534689f4cddf5fa2d3a93f22e9fd8001b23/propcache-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016", size = 236161 }, + { url = "https://files.pythonhosted.org/packages/a5/85/90132481183d1436dff6e29f4fa81b891afb6cb89a7306f32ac500a25932/propcache-0.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb", size = 244938 }, + { url = 
"https://files.pythonhosted.org/packages/4a/89/c893533cb45c79c970834274e2d0f6d64383ec740be631b6a0a1d2b4ddc0/propcache-0.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2", size = 243576 }, + { url = "https://files.pythonhosted.org/packages/8c/56/98c2054c8526331a05f205bf45cbb2cda4e58e56df70e76d6a509e5d6ec6/propcache-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4", size = 236011 }, + { url = "https://files.pythonhosted.org/packages/2d/0c/8b8b9f8a6e1abd869c0fa79b907228e7abb966919047d294ef5df0d136cf/propcache-0.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504", size = 224834 }, + { url = "https://files.pythonhosted.org/packages/18/bb/397d05a7298b7711b90e13108db697732325cafdcd8484c894885c1bf109/propcache-0.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178", size = 224946 }, + { url = "https://files.pythonhosted.org/packages/25/19/4fc08dac19297ac58135c03770b42377be211622fd0147f015f78d47cd31/propcache-0.2.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d", size = 217280 }, + { url = "https://files.pythonhosted.org/packages/7e/76/c79276a43df2096ce2aba07ce47576832b1174c0c480fe6b04bd70120e59/propcache-0.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2", size = 220088 }, + { url = "https://files.pythonhosted.org/packages/c3/9a/8a8cf428a91b1336b883f09c8b884e1734c87f724d74b917129a24fe2093/propcache-0.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db", size = 233008 }, + { url = "https://files.pythonhosted.org/packages/25/7b/768a8969abd447d5f0f3333df85c6a5d94982a1bc9a89c53c154bf7a8b11/propcache-0.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b", size = 237719 }, + { url = "https://files.pythonhosted.org/packages/ed/0d/e5d68ccc7976ef8b57d80613ac07bbaf0614d43f4750cf953f0168ef114f/propcache-0.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b", size = 227729 }, + { url = "https://files.pythonhosted.org/packages/05/64/17eb2796e2d1c3d0c431dc5f40078d7282f4645af0bb4da9097fbb628c6c/propcache-0.2.0-cp311-cp311-win32.whl", hash = "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1", size = 40473 }, + { url = "https://files.pythonhosted.org/packages/83/c5/e89fc428ccdc897ade08cd7605f174c69390147526627a7650fb883e0cd0/propcache-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71", size = 44921 }, + { url = "https://files.pythonhosted.org/packages/7c/46/a41ca1097769fc548fc9216ec4c1471b772cc39720eb47ed7e38ef0006a9/propcache-0.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2", size = 80800 }, + { url = "https://files.pythonhosted.org/packages/75/4f/93df46aab9cc473498ff56be39b5f6ee1e33529223d7a4d8c0a6101a9ba2/propcache-0.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7", size = 46443 }, + { url = "https://files.pythonhosted.org/packages/0b/17/308acc6aee65d0f9a8375e36c4807ac6605d1f38074b1581bd4042b9fb37/propcache-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8", size = 45676 }, + { url = "https://files.pythonhosted.org/packages/65/44/626599d2854d6c1d4530b9a05e7ff2ee22b790358334b475ed7c89f7d625/propcache-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793", size = 246191 }, + { url = "https://files.pythonhosted.org/packages/f2/df/5d996d7cb18df076debae7d76ac3da085c0575a9f2be6b1f707fe227b54c/propcache-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09", size = 251791 }, + { url = "https://files.pythonhosted.org/packages/2e/6d/9f91e5dde8b1f662f6dd4dff36098ed22a1ef4e08e1316f05f4758f1576c/propcache-0.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89", size = 253434 }, + { url = "https://files.pythonhosted.org/packages/3c/e9/1b54b7e26f50b3e0497cd13d3483d781d284452c2c50dd2a615a92a087a3/propcache-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e", size = 248150 }, + { url = "https://files.pythonhosted.org/packages/a7/ef/a35bf191c8038fe3ce9a414b907371c81d102384eda5dbafe6f4dce0cf9b/propcache-0.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9", size = 233568 }, + { url = "https://files.pythonhosted.org/packages/97/d9/d00bb9277a9165a5e6d60f2142cd1a38a750045c9c12e47ae087f686d781/propcache-0.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4", size = 229874 }, + { url = "https://files.pythonhosted.org/packages/8e/78/c123cf22469bdc4b18efb78893e69c70a8b16de88e6160b69ca6bdd88b5d/propcache-0.2.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c", size = 225857 }, + { url = "https://files.pythonhosted.org/packages/31/1b/fd6b2f1f36d028820d35475be78859d8c89c8f091ad30e377ac49fd66359/propcache-0.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887", size = 227604 }, + { url = "https://files.pythonhosted.org/packages/99/36/b07be976edf77a07233ba712e53262937625af02154353171716894a86a6/propcache-0.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57", size = 238430 }, + { url = "https://files.pythonhosted.org/packages/0d/64/5822f496c9010e3966e934a011ac08cac8734561842bc7c1f65586e0683c/propcache-0.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23", size = 244814 }, + { url = "https://files.pythonhosted.org/packages/fd/bd/8657918a35d50b18a9e4d78a5df7b6c82a637a311ab20851eef4326305c1/propcache-0.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348", size = 235922 }, + { url = 
"https://files.pythonhosted.org/packages/a8/6f/ec0095e1647b4727db945213a9f395b1103c442ef65e54c62e92a72a3f75/propcache-0.2.0-cp312-cp312-win32.whl", hash = "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5", size = 40177 }, + { url = "https://files.pythonhosted.org/packages/20/a2/bd0896fdc4f4c1db46d9bc361c8c79a9bf08ccc08ba054a98e38e7ba1557/propcache-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3", size = 44446 }, + { url = "https://files.pythonhosted.org/packages/a8/a7/5f37b69197d4f558bfef5b4bceaff7c43cc9b51adf5bd75e9081d7ea80e4/propcache-0.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7", size = 78120 }, + { url = "https://files.pythonhosted.org/packages/c8/cd/48ab2b30a6b353ecb95a244915f85756d74f815862eb2ecc7a518d565b48/propcache-0.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763", size = 45127 }, + { url = "https://files.pythonhosted.org/packages/a5/ba/0a1ef94a3412aab057bd996ed5f0ac7458be5bf469e85c70fa9ceb43290b/propcache-0.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d", size = 44419 }, + { url = "https://files.pythonhosted.org/packages/b4/6c/ca70bee4f22fa99eacd04f4d2f1699be9d13538ccf22b3169a61c60a27fa/propcache-0.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a", size = 229611 }, + { url = "https://files.pythonhosted.org/packages/19/70/47b872a263e8511ca33718d96a10c17d3c853aefadeb86dc26e8421184b9/propcache-0.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b", size = 234005 }, + { url = "https://files.pythonhosted.org/packages/4f/be/3b0ab8c84a22e4a3224719099c1229ddfdd8a6a1558cf75cb55ee1e35c25/propcache-0.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb", size = 237270 }, + { url = "https://files.pythonhosted.org/packages/04/d8/f071bb000d4b8f851d312c3c75701e586b3f643fe14a2e3409b1b9ab3936/propcache-0.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf", size = 231877 }, + { url = "https://files.pythonhosted.org/packages/93/e7/57a035a1359e542bbb0a7df95aad6b9871ebee6dce2840cb157a415bd1f3/propcache-0.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2", size = 217848 }, + { url = "https://files.pythonhosted.org/packages/f0/93/d1dea40f112ec183398fb6c42fde340edd7bab202411c4aa1a8289f461b6/propcache-0.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f", size = 216987 }, + { url = "https://files.pythonhosted.org/packages/62/4c/877340871251145d3522c2b5d25c16a1690ad655fbab7bb9ece6b117e39f/propcache-0.2.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136", size = 212451 }, + { url = "https://files.pythonhosted.org/packages/7c/bb/a91b72efeeb42906ef58ccf0cdb87947b54d7475fee3c93425d732f16a61/propcache-0.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325", size = 212879 }, + { url = "https://files.pythonhosted.org/packages/9b/7f/ee7fea8faac57b3ec5d91ff47470c6c5d40d7f15d0b1fccac806348fa59e/propcache-0.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44", size = 222288 }, + { url = "https://files.pythonhosted.org/packages/ff/d7/acd67901c43d2e6b20a7a973d9d5fd543c6e277af29b1eb0e1f7bd7ca7d2/propcache-0.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83", size = 228257 }, + { url = "https://files.pythonhosted.org/packages/8d/6f/6272ecc7a8daad1d0754cfc6c8846076a8cb13f810005c79b15ce0ef0cf2/propcache-0.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544", size = 221075 }, + { url = "https://files.pythonhosted.org/packages/7c/bd/c7a6a719a6b3dd8b3aeadb3675b5783983529e4a3185946aa444d3e078f6/propcache-0.2.0-cp313-cp313-win32.whl", hash = "sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032", size = 39654 }, + { url = "https://files.pythonhosted.org/packages/88/e7/0eef39eff84fa3e001b44de0bd41c7c0e3432e7648ffd3d64955910f002d/propcache-0.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e", size = 43705 }, + { url = "https://files.pythonhosted.org/packages/3d/b6/e6d98278f2d49b22b4d033c9f792eda783b9ab2094b041f013fc69bcde87/propcache-0.2.0-py3-none-any.whl", hash = "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036", size = 11603 }, +] + +[[package]] +name = "psutil" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/10/2a30b13c61e7cf937f4adf90710776b7918ed0a9c434e2c38224732af310/psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a", size = 508565 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/9e/8be43078a171381953cfee33c07c0d628594b5dbfc5157847b85022c2c1b/psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688", size = 247762 }, + { url = "https://files.pythonhosted.org/packages/1d/cb/313e80644ea407f04f6602a9e23096540d9dc1878755f3952ea8d3d104be/psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e", size = 248777 }, + { url = "https://files.pythonhosted.org/packages/65/8e/bcbe2025c587b5d703369b6a75b65d41d1367553da6e3f788aff91eaf5bd/psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38", size = 284259 }, + { url = "https://files.pythonhosted.org/packages/58/4d/8245e6f76a93c98aab285a43ea71ff1b171bcd90c9d238bf81f7021fb233/psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b", size = 287255 }, + { url = "https://files.pythonhosted.org/packages/27/c2/d034856ac47e3b3cdfa9720d0e113902e615f4190d5d1bdb8df4b2015fb2/psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a", size = 288804 }, + { url = 
"https://files.pythonhosted.org/packages/ea/55/5389ed243c878725feffc0d6a3bc5ef6764312b6fc7c081faaa2cfa7ef37/psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e", size = 250386 }, + { url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 }, +] + +[[package]] +name = "pycodestyle" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/b3/c832123f2699892c715fcdfebb1a8fdeffa11bb7b2350e46ecdd76b45a20/pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef", size = 103640 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/cc/227251b1471f129bc35e966bb0fceb005969023926d744139642d847b7ae/pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", size = 41725 }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pydantic" +version = "1.10.19" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/2d/df30554721cdad26b241b7a92e726dd1c3716d90c92915731eb00e17a9f7/pydantic-1.10.19.tar.gz", hash = "sha256:fea36c2065b7a1d28c6819cc2e93387b43dd5d3cf5a1e82d8132ee23f36d1f10", size = 355208 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/e7/c3276090605233eeda49e3f290ef6e8dc59962f883fa7934455996986d67/pydantic-1.10.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7a8a1dd68bac29f08f0a3147de1885f4dccec35d4ea926e6e637fac03cdb4b3", size = 2582314 }, + { url = "https://files.pythonhosted.org/packages/79/4c/fea1176272425a1b972db48b5b2582165095f22d88d4a249f02439dcd3e5/pydantic-1.10.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07d00ca5ef0de65dd274005433ce2bb623730271d495a7d190a91c19c5679d34", size = 2269582 }, + { url = "https://files.pythonhosted.org/packages/85/e5/34b62732fa683d1171be07fb40f0bab3fb35bc52e56bfcae1629aee236c4/pydantic-1.10.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad57004e5d73aee36f1e25e4e73a4bc853b473a1c30f652dc8d86b0a987ffce3", size = 3088988 }, + { url = "https://files.pythonhosted.org/packages/f5/23/be131d6162cd2c4f7f29cf0a881c0e9bdbf7c37010803f8a85010bf016bf/pydantic-1.10.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dce355fe7ae53e3090f7f5fa242423c3a7b53260747aa398b4b3aaf8b25f41c3", size = 3120098 }, + { url = "https://files.pythonhosted.org/packages/f1/72/7cf7dfc8e68098751a5cee8969a967dad2acf9ce460963d071296bdeee81/pydantic-1.10.19-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:0d32227ea9a3bf537a2273fd2fdb6d64ab4d9b83acd9e4e09310a777baaabb98", size = 3164823 }, + { url = "https://files.pythonhosted.org/packages/43/09/c7eb4c39faf7f01ebaed3fae8bf0b31388f2f7ffcefb07b2e5b9ea0f0617/pydantic-1.10.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e351df83d1c9cffa53d4e779009a093be70f1d5c6bb7068584086f6a19042526", size = 3115412 }, + { url = "https://files.pythonhosted.org/packages/80/a7/f9ecaaf940193a68d9566e2e61edce5f57d75591e59ff07a6af5fa7fb56f/pydantic-1.10.19-cp311-cp311-win_amd64.whl", hash = "sha256:d8d72553d2f3f57ce547de4fa7dc8e3859927784ab2c88343f1fc1360ff17a08", size = 2119019 }, + { url = "https://files.pythonhosted.org/packages/dc/bb/4883d3957b10b814b3bd7b7e8d51274f756e243e5eebd2f1cda36d933a32/pydantic-1.10.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d5b5b7c6bafaef90cbb7dafcb225b763edd71d9e22489647ee7df49d6d341890", size = 2410108 }, + { url = "https://files.pythonhosted.org/packages/86/80/752f888be6b068727fb893d4d875ef1cc6bb3ed3dc382f33a019fc26598a/pydantic-1.10.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:570ad0aeaf98b5e33ff41af75aba2ef6604ee25ce0431ecd734a28e74a208555", size = 2166105 }, + { url = "https://files.pythonhosted.org/packages/af/d4/346e56049cbc5ca429a1590bd0ab47cc154b1dec9e85fc920f7d5e50c889/pydantic-1.10.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0890fbd7fec9e151c7512941243d830b2d6076d5df159a2030952d480ab80a4e", size = 2800443 }, + { url = "https://files.pythonhosted.org/packages/c5/73/e1934973bf8bf436f1e1e365ed48dc51da5d7ba8b88dcd2239c962a267e0/pydantic-1.10.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec5c44e6e9eac5128a9bfd21610df3b8c6b17343285cc185105686888dc81206", size = 2830897 }, + { url = "https://files.pythonhosted.org/packages/87/5e/90e3e3c8bd70012986c22aa5f291aab948bdf419ca694833872594ff99ea/pydantic-1.10.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6eb56074b11a696e0b66c7181da682e88c00e5cebe6570af8013fcae5e63e186", size = 2863147 }, + { url = "https://files.pythonhosted.org/packages/50/c2/95be3fdfafdaf49d09369a46bbcf1f22494765479b44436e954837b818cc/pydantic-1.10.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d7d48fbc5289efd23982a0d68e973a1f37d49064ccd36d86de4543aff21e086", size = 2823662 }, + { url = "https://files.pythonhosted.org/packages/00/ba/8b1c91cc27428b5e9d340abf99f82c01987eb638937ff64ae0166354bff0/pydantic-1.10.19-cp312-cp312-win_amd64.whl", hash = "sha256:fd34012691fbd4e67bdf4accb1f0682342101015b78327eaae3543583fcd451e", size = 1950372 }, + { url = "https://files.pythonhosted.org/packages/a4/68/99ebf43b6b0321175cff0a05f0ce7fa51a8de67d390ccb8ab0d534be86a9/pydantic-1.10.19-py3-none-any.whl", hash = "sha256:2206a1752d9fac011e95ca83926a269fb0ef5536f7e053966d058316e24d929f", size = 165863 }, +] + +[[package]] +name = "pyflakes" +version = "2.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/0f/0dc480da9162749bf629dca76570972dd9cce5bedc60196a3c912875c87d/pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db", size = 68567 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/11/2a745612f1d3cbbd9c69ba14b1b43a35a2f5c3c81cd0124508c52c64307f/pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3", size = 68805 }, +] + +[[package]] +name = "pygments" +version = "2.18.0" +source = { registry 
= "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, +] + +[[package]] +name = "pyjwt" +version = "2.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/68/ce067f09fca4abeca8771fe667d89cc347d1e99da3e093112ac329c6020e/pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c", size = 78825 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/84/0fdf9b18ba31d69877bd39c9cd6052b47f3761e9910c15de788e519f079f/PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850", size = 22344 }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + +[[package]] +name = "pyproject-api" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bb/19/441e0624a8afedd15bbcce96df1b80479dd0ff0d965f5ce8fde4f2f6ffad/pyproject_api-1.8.0.tar.gz", hash = "sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496", size = 22340 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/f4/3c4ddfcc0c19c217c6de513842d286de8021af2f2ab79bbb86c00342d778/pyproject_api-1.8.0-py3-none-any.whl", hash = "sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228", size = 13100 }, +] + +[[package]] +name = "pytest" +version = "8.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/6c/62bbd536103af674e227c41a8f3dcd022d591f6eed5facb5a0f31ee33bbc/pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", size = 1442487 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2", size = 342341 }, +] + +[[package]] +name = "pytest-cov" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/be/45/9b538de8cef30e17c7b45ef42f538a94889ed6a16f2387a6c89e73220651/pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0", size = 66945 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/3b/48e79f2cd6a61dbbd4807b4ed46cb564b4fd50a76166b1c4ea5c1d9e2371/pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35", size = 22949 }, +] + +[[package]] +name = "pytest-random-order" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = 
{ url = "https://files.pythonhosted.org/packages/93/e5/89654b4354b10e89969a74130f391b017dbdc113ce27f0e8ff9fa23e44e1/pytest-random-order-1.1.1.tar.gz", hash = "sha256:4472d7d34f1f1c5f3a359c4ffc5c13ed065232f31eca19c8844c1ab406e79080", size = 14626 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/02/944cf846bcd6027a1805c69fec90581f916e99ccafcbe409ae6c76833255/pytest_random_order-1.1.1-py3-none-any.whl", hash = "sha256:882727a8b597ecd06ede28654ffeb8a6d511a1e4abe1054cca7982f2e42008cd", size = 11521 }, +] + +[[package]] +name = "pytest-vcr" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "vcrpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/60/104c619483c1a42775d3f8b27293f1ecfc0728014874d065e68cb9702d49/pytest-vcr-1.0.2.tar.gz", hash = "sha256:23ee51b75abbcc43d926272773aae4f39f93aceb75ed56852d0bf618f92e1896", size = 3810 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/d3/ff520d11e6ee400602711d1ece8168dcfc5b6d8146fb7db4244a6ad6a9c3/pytest_vcr-1.0.2-py2.py3-none-any.whl", hash = "sha256:2f316e0539399bea0296e8b8401145c62b6f85e9066af7e57b6151481b0d6d9c", size = 4137 }, +] + +[[package]] +name = "python-baseconv" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/33/d0/9297d7d8dd74767b4d5560d834b30b2fff17d39987c23ed8656f476e0d9b/python-baseconv-1.2.2.tar.gz", hash = "sha256:0539f8bd0464013b05ad62e0a1673f0ac9086c76b43ebf9f833053527cd9931b", size = 4929 } + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "pytz" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756 }, +] + +[[package]] +name = "pyyaml" 
+version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = 
"https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, +] + +[[package]] +name = "referencing" +version = "0.35.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/73ca1f8e72fff6fa52119dbd185f73a907b1989428917b24cff660129b6d/referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c", size = 62991 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/59/2056f61236782a2c86b33906c025d4f4a0b17be0161b63b70fd9e8775d36/referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de", size = 26684 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "requests-futures" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/07/9140eb28a74f5ee0f256b8c99981f6d21f9f60af5721ca694176fd080687/requests-futures-1.0.1.tar.gz", hash = "sha256:f55a4ef80070e2858e7d1e73123d2bfaeaf25b93fd34384d8ddf148e2b676373", size = 9921 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/ca/ca664ed374fab67535482532c3c05bb5dbe5850e7dff2491eb827c318e48/requests_futures-1.0.1-py2.py3-none-any.whl", hash = "sha256:4a2f5472e9911a79532137d156aa937cd9cd90fec55677f71b2976d1f7a66d38", size = 7597 }, +] + +[[package]] +name = "responses" +version = "0.25.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/24/1d67c8974daa502e860b4a5b57ad6de0d7dbc0b1160ef7148189a24a40e1/responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba", size = 77798 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/24/93293d0be0db9da1ed8dfc5e6af700fdd40e8f10a928704dd179db9f03c1/responses-0.25.3-py3-none-any.whl", hash = 
"sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb", size = 55238 }, +] + +[[package]] +name = "rich" +version = "13.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 }, +] + +[[package]] +name = "robotframework" +version = "7.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/85/824b95cd3fcaf0eb6c353481b415871da4186e6414ba06a99772a48b960e/robotframework-7.1.1.zip", hash = "sha256:f85919c68c4d0837006e5f09dde1ef689f082eba2e7e64d5758753f9ee8bfea9", size = 761336 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/3c/a1f0971f4405c5accea879e84be91fb98956d778ff1cfc232410fc8558ae/robotframework-7.1.1-py3-none-any.whl", hash = "sha256:0461360be00dfb8ce1ab3f42370fa6eea3779e41c0b8d79a1f8ddcd2ec8e3679", size = 730648 }, +] + +[[package]] +name = "robotframework-pabot" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "natsort" }, + { name = "robotframework" }, + { name = "robotframework-stacktrace" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f2/66/6e6905d688e72f5c7b8a596d75940cae6572dfae8f1000d7ae3bb64d68ce/robotframework-pabot-2.18.0.tar.gz", hash = "sha256:3d870d98156cecd81f9a8d88deaa2174aac808d81ca1c11c561a817b0dbaa404", size = 46505 } + +[[package]] +name = "robotframework-pythonlibcore" +version = "4.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/89/5dc8c8186c897ee4b7d0b2631ebc90e679e8c8f04ea85505f96ad38aad64/robotframework-pythonlibcore-4.4.1.tar.gz", hash = "sha256:2d695b2ea906f5815179643e29182466675e682d82c5fa9d1009edfae2f84b16", size = 12835 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/64/47d8403c7c0af89b46461640a2f67e49a5778062b8dd6eb3e128aa3c50cc/robotframework_pythonlibcore-4.4.1-py2.py3-none-any.whl", hash = "sha256:e0517129522aaa039eb2a28fd3d9720b7a0be0b90d0cbcb153a6c8016bb9e973", size = 12452 }, +] + +[[package]] +name = "robotframework-requests" +version = "0.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "robotframework" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/e2/b17b940985e7b35f53767d908897870fcf4e143a2a7c2da76d152e4abc4c/robotframework-requests-0.9.7.tar.gz", hash = "sha256:c2a2839813e1dc6b299e7d336314c9982c225c5b7e001ec893dc3555c6a95740", size = 19404 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/18/03ef4d1132f50b55220af5308db91e1cc5481b8b063cac5fafa625b00f64/robotframework_requests-0.9.7-py3-none-any.whl", hash = "sha256:96315066318778cbcf5523cdb6175f5a0b8fec33275030a20dade3a3d98aeca2", size = 21055 }, +] + +[[package]] +name = "robotframework-seleniumlibrary" +version = "5.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "robotframework" }, + { name = "robotframework-pythonlibcore" }, + { name = 
"selenium" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/75/fe0184ba697a585d80457b74b7bed1bb290501cd6f9883d149efb4a3d9f2/robotframework-seleniumlibrary-5.1.3.tar.gz", hash = "sha256:f51a0068c6c0d8107ee1120874a3afbf2bbe751fd0782cb86a27a616d9ca30b6", size = 156935 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/d4/ca1de2d557d16fdb56916c6168d2bfccff9debc0f9823ba1f6ecdf0d5e53/robotframework_seleniumlibrary-5.1.3-py2.py3-none-any.whl", hash = "sha256:7c8211b870249db53dbb2091a5a36c09aa657f06405d112587d37d33fff7454e", size = 94652 }, +] + +[[package]] +name = "robotframework-stacktrace" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "robotframework" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/cf/6e6934c3d037ef3f5914e88494127ec4d0fea73bd566539e08b9fa2c9324/robotframework-stacktrace-0.4.1.tar.gz", hash = "sha256:e96cb36e7e9ab55104c1f7d3606249a109e0a4c3bb6a0e294bff07d54ee6f6a5", size = 12634 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/f2/be7d680eb8a23e86ea288f6c421e1e8c30c823a8f1521dc6b9f9d7b7692b/robotframework_stacktrace-0.4.1-py3-none-any.whl", hash = "sha256:018d7a55b99733e64e3cc0b134771b61a47de61de23609ed35c7bf0a53e9290e", size = 8543 }, +] + +[[package]] +name = "rpds-py" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/80/afdf96daf9b27d61483ef05b38f282121db0e38f5fd4e89f40f5c86c2a4f/rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db", size = 26335 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/61/615929ea79f5fd0b3aca000411a33bcc1753607ccc1af0ce7b05b56e6e56/rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95", size = 327267 }, + { url = "https://files.pythonhosted.org/packages/a5/f5/28e89dda55b731d78cbfea284dc9789d265a8a06523f0adf60e9b05cade7/rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9", size = 318227 }, + { url = "https://files.pythonhosted.org/packages/e4/ef/eb90feb3e384543c48e2f867551075c43a429aa4c9a44e9c4bd71f4f786b/rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027", size = 361235 }, + { url = "https://files.pythonhosted.org/packages/ed/e7/8ea2d3d3398266c5c8ddd957d86003493b6d14f8f158b726dd09c8f43dee/rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9", size = 369467 }, + { url = "https://files.pythonhosted.org/packages/51/25/a286abda9da7820c971a0b1abcf1d31fb81c44a1088a128ad26c77206622/rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3", size = 403482 }, + { url = "https://files.pythonhosted.org/packages/7a/1e/9c3c0463fe142456dcd9e9be0ffd15b66a77adfcdf3ecf94fa2b12d95fcb/rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8", size = 429943 }, + { url = 
"https://files.pythonhosted.org/packages/e1/fd/f1fd7e77fef8e5a442ce7fd80ba957730877515fe18d7195f646408a60ce/rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d", size = 360437 }, + { url = "https://files.pythonhosted.org/packages/55/83/347932db075847f4f8172c3b53ad70fe725edd9058f0d4098080ad45e3bc/rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75", size = 382400 }, + { url = "https://files.pythonhosted.org/packages/22/9b/2a6eeab4e6752adba751cfee19bdf35d11e1073509f74883cbf14d42d682/rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f", size = 546560 }, + { url = "https://files.pythonhosted.org/packages/3c/19/6e51a141fe6f017d07b7d899b10a4af9e0f268deffacc1107d70fcd9257b/rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a", size = 549334 }, + { url = "https://files.pythonhosted.org/packages/cf/40/4ae09a07e4531278e6bee41ef3e4f166c23468135afc2c6c98917bfc28e6/rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8", size = 527855 }, + { url = "https://files.pythonhosted.org/packages/eb/45/2135be31543677687a426117c56d8b33e8b581bc4a8b7abfa53721012162/rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a", size = 200968 }, + { url = "https://files.pythonhosted.org/packages/68/fa/e66c3aaf13ef91c203ba47c102cd7c5dca92dde8837e5093577968d6d36d/rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e", size = 218502 }, + { url = "https://files.pythonhosted.org/packages/d9/5a/3aa6f5d8bacbe4f55ebf9a3c9628dad40cdb57f845124cf13c78895ea156/rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d", size = 329516 }, + { url = "https://files.pythonhosted.org/packages/df/c0/67c8c8ac850c6e3681e356a59d46315bf73bc77cb50c9a32db8ae44325b7/rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72", size = 321245 }, + { url = "https://files.pythonhosted.org/packages/64/83/bf31341f21fa594035891ff04a497dc86b210cc1a903a9cc01b097cc614f/rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266", size = 363951 }, + { url = "https://files.pythonhosted.org/packages/a2/e1/8218bba36737621262df316fbb729639af25ff611cc07bfeaadc1bfa6292/rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be", size = 373113 }, + { url = "https://files.pythonhosted.org/packages/39/8d/4afcd688e3ad33ec273900f42e6a41e9bd9f43cfc509b6d498683d2d0338/rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab", size = 405944 }, + { url = "https://files.pythonhosted.org/packages/fa/65/3326efa721b6ecd70262aab69a26c9bc19398cdb0a2a416ef30b58326460/rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7", size = 422874 }, + { url = "https://files.pythonhosted.org/packages/31/fb/48a647d0afab74289dd21a4128002d58684c22600a22c4bfb76cb9e3bfb0/rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf", size = 364227 }, + { url = "https://files.pythonhosted.org/packages/f1/b0/1cdd179d7382dd52d65b1fd19c54d090b6bd0688dfbe259bb5ab7548c359/rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4", size = 386447 }, + { url = "https://files.pythonhosted.org/packages/dc/41/84ace07f31aac3a96b73a374d89106cf252f7d3274e7cae85d17a27c602d/rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca", size = 549386 }, + { url = "https://files.pythonhosted.org/packages/33/ce/bf51bc5a3aa539171ea8c7737ab5ac06cef54c79b6b2a0511afc41533c89/rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b", size = 554777 }, + { url = "https://files.pythonhosted.org/packages/76/b1/950568e55a94c2979c2b61ec24e76e648a525fbc7551ccfc1f2841e39d44/rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11", size = 530918 }, + { url = "https://files.pythonhosted.org/packages/78/84/93f00e3613426c8a7a9ca16782d2828f2ac55296dd5c6b599379d9f59ee2/rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952", size = 203112 }, + { url = "https://files.pythonhosted.org/packages/e6/08/7a186847dd78881a781d2be9b42c8e49c3261c0f4a6d0289ba9a1e4cde71/rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd", size = 220735 }, + { url = "https://files.pythonhosted.org/packages/32/3a/e69ec108eefb9b1f19ee00dde7a800b485942e62b123f01d9156a6d8569c/rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937", size = 329206 }, + { url = "https://files.pythonhosted.org/packages/f6/c0/fa689498fa3415565306398c8d2a596207c2a13d3cc03724f32514bddfbc/rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560", size = 320245 }, + { url = "https://files.pythonhosted.org/packages/68/d0/466b61007005f1b2fd8501f23e4bdee4d71c7381b61358750920d1882ac9/rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b", size = 363585 }, + { url = "https://files.pythonhosted.org/packages/1e/e2/787ea3a0f4b197893c62c254e6f14929c40bbcff86922928ac4eafaa8edf/rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0", size = 372302 }, + { url = "https://files.pythonhosted.org/packages/b5/ef/99f2cfe6aa128c21f1b30c66ecd348cbd59792953ca35eeb6efa38b88aa1/rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44", size = 405344 }, + { url = 
"https://files.pythonhosted.org/packages/30/3c/9d12d0b76ecfe80a7ba4770459828dda495d72b18cafd6dfd54c67b2e282/rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74", size = 422322 }, + { url = "https://files.pythonhosted.org/packages/f9/22/387aec1cd6e124adbc3b1f40c4e4152c3963ae47d78d3ca650102ea72c4f/rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94", size = 363739 }, + { url = "https://files.pythonhosted.org/packages/d1/3e/0ad65b776db13d13f002ab363fe3821cd1adec500d8e05e0a81047a75f9d/rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3", size = 386579 }, + { url = "https://files.pythonhosted.org/packages/4f/3b/c68c1067b24a7df47edcc0325a825908601aba399e2d372a156edc631ad1/rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a", size = 548924 }, + { url = "https://files.pythonhosted.org/packages/ab/1c/35f1a5cce4bca71c49664f00140010a96b126e5f443ebaf6db741c25b9b7/rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3", size = 554217 }, + { url = "https://files.pythonhosted.org/packages/c8/d0/48154c152f9adb8304b21d867d28e79be3b352633fb195c03c7107a4da9a/rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976", size = 530540 }, + { url = "https://files.pythonhosted.org/packages/50/e8/78847f4e112e99fd5b7bc30fea3e4a44c20b811473d6755f944c5bf0aec7/rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202", size = 202604 }, + { url = "https://files.pythonhosted.org/packages/60/31/083e6337775e133fb0217ed0ab0752380efa6e5112f2250d592d4135a228/rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e", size = 220448 }, +] + +[[package]] +name = "rst2ansi" +version = "0.1.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/19/b29bc04524e7d1dbde13272fbb67e45a8eb24bb6d112cf10c46162b350d7/rst2ansi-0.1.5.tar.gz", hash = "sha256:1b17fb9a628d40f57933ad1a3aa952346444be069469508e73e95060da33fe6f", size = 9989 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/cb/fdb216f2b8bbec9c43655a79f2f280b2ba7822b2c8396ecceafa0c232320/rst2ansi-0.1.5-py3-none-any.whl", hash = "sha256:b2cf192e38975918d07540bba7d673550cd7d28ca7443410984e22d5ab058fb3", size = 18414 }, +] + +[[package]] +name = "salesforce-bulk" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "simple-salesforce" }, + { name = "six" }, + { name = "unicodecsv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/45/ae6a5566997e180755b0e02dac3374ae10071f44300c917a3958a41d324a/salesforce-bulk-2.2.0.tar.gz", hash = "sha256:6894e2f0d1b7a719388bbc425e1874cc096a3cc80106e93098a672709ac5ff4e", size = 12305 } + +[[package]] +name = "sarge" +version = "0.1.7.post1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/03/937f1f848ffe04c48e2dd0bd6c93da1a583d58695bb74a8957650ea6f0d0/sarge-0.1.7.post1.tar.gz", 
hash = "sha256:64ff42ae6ef90acbded6318ed440ed63b31a669302fb60cf41265debea282a3d", size = 25736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/fd/691a7a847559747d122a25f454b6dc4eb2a83c23ba44d161a1fdff5ede92/sarge-0.1.7.post1-py2.py3-none-any.whl", hash = "sha256:6da81592eac3fdb55708baddaf28deaad3a18f8719e3c082ea3b0405647ae72c", size = 18506 }, +] + +[[package]] +name = "secretstorage" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "jeepney" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/a4/f48c9d79cb507ed1373477dbceaba7401fd8a23af63b837fa61f1dcd3691/SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77", size = 19739 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99", size = 15221 }, +] + +[[package]] +name = "selenium" +version = "3.141.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/9c/9030520bf6ff0b4c98988448a93c04fcbd5b13cd9520074d8ed53569ccfe/selenium-3.141.0.tar.gz", hash = "sha256:deaf32b60ad91a4611b98d8002757f29e6f2c2d5fcaf202e1c9ad06d6772300d", size = 854669 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/d6/4294f0b4bce4de0abf13e17190289f9d0613b0a44e5dd6a7f5ca98459853/selenium-3.141.0-py2.py3-none-any.whl", hash = "sha256:2d7131d7bc5a5b99a2d9b04aaf2612c411b03b8ca1b1ee8d3de5845a9be2cb3c", size = 904577 }, +] + +[[package]] +name = "setuptools" +version = "75.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/db/722a42ffdc226e950c4757b3da7b56ff5c090bb265dccd707f7b8a3c6fee/setuptools-75.5.0.tar.gz", hash = "sha256:5c4ccb41111392671f02bb5f8436dfc5a9a7185e80500531b133f5775c4163ef", size = 1336032 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/df/88ccbee85aefbca071db004fdc8f8d2507d55d5a9dc27ebb93c92edb1bd8/setuptools-75.5.0-py3-none-any.whl", hash = "sha256:87cb777c3b96d638ca02031192d40390e0ad97737e27b6b4fa831bea86f2f829", size = 1222710 }, +] + +[[package]] +name = "simple-salesforce" +version = "1.11.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "authlib" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/15/a8/9f3832c5229df89e115de112f57ae2d6b567ec47a885cce87c752f453423/simple-salesforce-1.11.4.tar.gz", hash = "sha256:3768fe40d04daa74409acccd9934fcf833697c6b239d9bf52d7f87a99efbe41e", size = 33101 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/84/777a88fc20615a0e42af2853f156fb436f09d0cdad64be584afa2312a8f4/simple_salesforce-1.11.4-py2.py3-none-any.whl", hash = "sha256:fbfa2940070007853d4ad437ac6064bfa55b20750f28e360cae72597450c36e5", size = 30314 }, +] + +[[package]] +name = "six" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053 }, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/7b/af302bebf22c749c56c9c3e8ae13190b5b5db37a33d9068652e8f73b7089/snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", size = 86699 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/dc/c02e01294f7265e63a7315fe086dd1df7dacb9f840a804da846b96d01b96/snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a", size = 93002 }, +] + +[[package]] +name = "snowfakery" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "faker" }, + { name = "faker-edu" }, + { name = "faker-nonprofit" }, + { name = "gvgen" }, + { name = "jinja2" }, + { name = "pydantic" }, + { name = "python-baseconv" }, + { name = "python-dateutil" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "setuptools" }, + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/94/51848ad67a409e2b8d37e10277e4ee43b8c982a47fd6e9bb114f427374b0/snowfakery-4.0.0.tar.gz", hash = "sha256:95b4a5add5b7e8483fcbf567e3b83ec7418031ce8a00fdc8542c906ec5392d91", size = 76039 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/dd/6da304a468b69f036c2185a116cf4840e36b510e65c22bebb07abeec78fd/snowfakery-4.0.0-py3-none-any.whl", hash = "sha256:38ed1faec5839d45454a2ecf0a64ec4cb352662e84694b204866a0e9dedc1a52", size = 100733 }, +] + +[[package]] +name = "soupsieve" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/ce/fbaeed4f9fb8b2daa961f90591662df6a86c1abf25c548329a86920aedfb/soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", size = 101569 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 }, +] + +[[package]] +name = "sphinx" +version = "8.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alabaster" }, + { name = "babel" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "docutils" }, + { name = "imagesize" }, + { name = "jinja2" }, + { name = "packaging" }, + { name = "pygments" }, + { name = "requests" }, + { name = "snowballstemmer" }, + { name = "sphinxcontrib-applehelp" }, + { name = "sphinxcontrib-devhelp" }, + { name = "sphinxcontrib-htmlhelp" }, + { name = "sphinxcontrib-jsmath" }, + { name = "sphinxcontrib-qthelp" }, + { name = "sphinxcontrib-serializinghtml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = 
"sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125 }, +] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/0b/a866924ded68efec7a1759587a4e478aec7559d8165fac8b2ad1c0e774d6/sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9", size = 20736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/dd/018ce05c532a22007ac58d4f45232514cd9d6dd0ee1dc374e309db830983/sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b", size = 22496 }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300 }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530 }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705 }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071 }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743 }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072 }, +] + +[[package]] +name = "sqlalchemy" +version = "1.4.54" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(platform_machine == 'AMD64' and python_full_version >= '3.11') or (platform_machine == 'WIN32' and python_full_version >= '3.11') or (platform_machine == 'aarch64' and python_full_version >= '3.11') or (platform_machine == 'amd64' and python_full_version >= '3.11') or (platform_machine == 'ppc64le' and python_full_version >= '3.11') or (platform_machine == 'win32' and python_full_version >= '3.11') or (platform_machine == 'x86_64' and python_full_version >= '3.11')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ce/af/20290b55d469e873cba9d41c0206ab5461ff49d759989b3fe65010f9d265/sqlalchemy-1.4.54.tar.gz", hash = "sha256:4470fbed088c35dc20b78a39aaf4ae54fe81790c783b3264872a0224f437c31a", size = 8470350 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/49/fb98983b5568e93696a25fd5bec1b789095b79a72d5f57c6effddaa81d0a/SQLAlchemy-1.4.54-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b05e0626ec1c391432eabb47a8abd3bf199fb74bfde7cc44a26d2b1b352c2c6e", size = 1589301 }, + { url = "https://files.pythonhosted.org/packages/03/98/5a81430bbd646991346cb088a2bdc84d1bcd3dbe6b0cfc1aaa898370e5c7/SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13e91d6892b5fcb94a36ba061fb7a1f03d0185ed9d8a77c84ba389e5bb05e936", size = 1629553 }, + { url = "https://files.pythonhosted.org/packages/f1/17/14e35db2b0d6deaa27691d014addbb0dd6f7e044f7ee465446a3c0c71404/SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb59a11689ff3c58e7652260127f9e34f7f45478a2f3ef831ab6db7bcd72108f", size = 1627640 }, + { url = "https://files.pythonhosted.org/packages/98/62/335006a8f2c98f704f391e1a0cc01446d1b1b9c198f579f03599f55bd860/SQLAlchemy-1.4.54-cp311-cp311-win32.whl", hash = "sha256:1390ca2d301a2708fd4425c6d75528d22f26b8f5cbc9faba1ddca136671432bc", size = 1591723 }, + { url = "https://files.pythonhosted.org/packages/e2/a1/6b4b8c07082920f5445ec65c221fa33baab102aced5dcc2d87a15d3f8db4/SQLAlchemy-1.4.54-cp311-cp311-win_amd64.whl", hash = "sha256:2b37931eac4b837c45e2522066bda221ac6d80e78922fb77c75eb12e4dbcdee5", size = 1593511 }, + { url = 
"https://files.pythonhosted.org/packages/a5/1b/aa9b99be95d1615f058b5827447c18505b7b3f1dfcbd6ce1b331c2107152/SQLAlchemy-1.4.54-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3f01c2629a7d6b30d8afe0326b8c649b74825a0e1ebdcb01e8ffd1c920deb07d", size = 1589983 }, + { url = "https://files.pythonhosted.org/packages/59/47/cb0fc64e5344f0a3d02216796c342525ab283f8f052d1c31a1d487d08aa0/SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c24dd161c06992ed16c5e528a75878edbaeced5660c3db88c820f1f0d3fe1f4", size = 1630158 }, + { url = "https://files.pythonhosted.org/packages/c0/8b/f45dd378f6c97e8ff9332ff3d03ecb0b8c491be5bb7a698783b5a2f358ec/SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5e0d47d619c739bdc636bbe007da4519fc953393304a5943e0b5aec96c9877c", size = 1629232 }, + { url = "https://files.pythonhosted.org/packages/0d/3c/884fe389f5bec86a310b81e79abaa1e26e5d78dc10a84d544a6822833e47/SQLAlchemy-1.4.54-cp312-cp312-win32.whl", hash = "sha256:12bc0141b245918b80d9d17eca94663dbd3f5266ac77a0be60750f36102bbb0f", size = 1592027 }, + { url = "https://files.pythonhosted.org/packages/01/c3/c690d037be57efd3a69cde16a2ef1bd2a905dafe869434d33836de0983d0/SQLAlchemy-1.4.54-cp312-cp312-win_amd64.whl", hash = "sha256:f941aaf15f47f316123e1933f9ea91a6efda73a161a6ab6046d1cde37be62c88", size = 1593827 }, +] + +[[package]] +name = "testfixtures" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/25/d7e9d05f87e2ab84657a0dfb1f24fc295d542ac2eb221531d976ea4aa1ff/testfixtures-8.3.0.tar.gz", hash = "sha256:d4c0b84af2f267610f908009b50d6f983a4e58ade22c67bab6787b5a402d59c0", size = 137420 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/4e/699671ba484b94bda0959b281ff59b24f728263befd13e060fa038ce3bc8/testfixtures-8.3.0-py3-none-any.whl", hash = "sha256:3d1e0e0005c4d6ac2a2ab27916704c6471047f0d2f78f2e54adf20abdacc7b10", size = 105085 }, +] + +[[package]] +name = "tomli" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/e4/1b6cbcc82d8832dd0ce34767d5c560df8a3547ad8cbc427f34601415930a/tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8", size = 16622 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/f7/4da0ffe1892122c9ea096c57f64c2753ae5dd3ce85488802d11b0992cc6d/tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391", size = 13750 }, +] + +[[package]] +name = "tox" +version = "4.23.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "chardet" }, + { name = "colorama" }, + { name = "filelock" }, + { name = "packaging" }, + { name = "platformdirs" }, + { name = "pluggy" }, + { name = "pyproject-api" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1f/86/32b10f91b4b975a37ac402b0f9fa016775088e0565c93602ba0b3c729ce8/tox-4.23.2.tar.gz", hash = "sha256:86075e00e555df6e82e74cfc333917f91ecb47ffbc868dcafbd2672e332f4a2c", size = 189998 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/c0/124b73d01c120e917383bc6c53ebc34efdf7243faa9fca64d105c94cf2ab/tox-4.23.2-py3-none-any.whl", hash = "sha256:452bc32bb031f2282881a2118923176445bac783ab97c874b8770ab4c3b76c38", size = 166758 }, +] + +[[package]] +name = 
"typeguard" +version = "2.13.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/38/c61bfcf62a7b572b5e9363a802ff92559cb427ee963048e1442e3aef7490/typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4", size = 40604 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/bb/d43e5c75054e53efce310e79d63df0ac3f25e34c926be5dffb7d283fb2a8/typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1", size = 17605 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "unicodecsv" +version = "0.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/a4/691ab63b17505a26096608cc309960b5a6bdf39e4ba1a793d5f9b1a53270/unicodecsv-0.14.1.tar.gz", hash = "sha256:018c08037d48649a0412063ff4eda26eaa81eff1546dbffa51fa5293276ff7fc", size = 10267 } + +[[package]] +name = "uritemplate" +version = "4.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/5a/4742fdba39cd02a56226815abfa72fe0aa81c33bed16ed045647d6000eba/uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0", size = 273898 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c0/7461b49cd25aeece13766f02ee576d1db528f1c37ce69aee300e075b485b/uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e", size = 10356 }, +] + +[[package]] +name = "urllib3" +version = "1.26.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32", size = 307380 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", size = 144225 }, +] + +[[package]] +name = "vcrpy" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "urllib3", marker = "python_full_version >= '3.11'" }, + { name = "wrapt" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/16/4e/fff59599826793f9e3460c22c0af0377abb27dc9781a7d5daca8cb03da25/vcrpy-6.0.2.tar.gz", hash = "sha256:88e13d9111846745898411dbc74a75ce85870af96dd320d75f1ee33158addc09", size = 85472 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/ed/25d19705791d3fccc84423d564695421a75b4e08e8ab15a004a49068742d/vcrpy-6.0.2-py2.py3-none-any.whl", hash = 
"sha256:40370223861181bc76a5e5d4b743a95058bb1ad516c3c08570316ab592f56cad", size = 42431 }, +] + +[[package]] +name = "virtualenv" +version = "20.27.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8c/b3/7b6a79c5c8cf6d90ea681310e169cf2db2884f4d583d16c6e1d5a75a4e04/virtualenv-20.27.1.tar.gz", hash = "sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba", size = 6491145 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/92/78324ff89391e00c8f4cf6b8526c41c6ef36b4ea2d2c132250b1a6fc2b8d/virtualenv-20.27.1-py3-none-any.whl", hash = "sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4", size = 3117838 }, +] + +[[package]] +name = "wrapt" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/4c/063a912e20bcef7124e0df97282a8af3ff3e4b603ce84c481d6d7346be0a/wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d", size = 53972 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/03/c188ac517f402775b90d6f312955a5e53b866c964b32119f2ed76315697e/wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09", size = 37313 }, + { url = "https://files.pythonhosted.org/packages/0f/16/ea627d7817394db04518f62934a5de59874b587b792300991b3c347ff5e0/wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d", size = 38164 }, + { url = "https://files.pythonhosted.org/packages/7f/a7/f1212ba098f3de0fd244e2de0f8791ad2539c03bef6c05a9fcb03e45b089/wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389", size = 80890 }, + { url = "https://files.pythonhosted.org/packages/b7/96/bb5e08b3d6db003c9ab219c487714c13a237ee7dcc572a555eaf1ce7dc82/wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060", size = 73118 }, + { url = "https://files.pythonhosted.org/packages/6e/52/2da48b35193e39ac53cfb141467d9f259851522d0e8c87153f0ba4205fb1/wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1", size = 80746 }, + { url = "https://files.pythonhosted.org/packages/11/fb/18ec40265ab81c0e82a934de04596b6ce972c27ba2592c8b53d5585e6bcd/wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3", size = 85668 }, + { url = "https://files.pythonhosted.org/packages/0f/ef/0ecb1fa23145560431b970418dce575cfaec555ab08617d82eb92afc7ccf/wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956", size = 78556 }, + { url = "https://files.pythonhosted.org/packages/25/62/cd284b2b747f175b5a96cbd8092b32e7369edab0644c45784871528eb852/wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d", size = 85712 }, + { url = 
"https://files.pythonhosted.org/packages/e5/a7/47b7ff74fbadf81b696872d5ba504966591a3468f1bc86bca2f407baef68/wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362", size = 35327 }, + { url = "https://files.pythonhosted.org/packages/cf/c3/0084351951d9579ae83a3d9e38c140371e4c6b038136909235079f2e6e78/wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89", size = 37523 }, + { url = "https://files.pythonhosted.org/packages/92/17/224132494c1e23521868cdd57cd1e903f3b6a7ba6996b7b8f077ff8ac7fe/wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b", size = 37614 }, + { url = "https://files.pythonhosted.org/packages/6a/d7/cfcd73e8f4858079ac59d9db1ec5a1349bc486ae8e9ba55698cc1f4a1dff/wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36", size = 38316 }, + { url = "https://files.pythonhosted.org/packages/7e/79/5ff0a5c54bda5aec75b36453d06be4f83d5cd4932cc84b7cb2b52cee23e2/wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73", size = 86322 }, + { url = "https://files.pythonhosted.org/packages/c4/81/e799bf5d419f422d8712108837c1d9bf6ebe3cb2a81ad94413449543a923/wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809", size = 79055 }, + { url = "https://files.pythonhosted.org/packages/62/62/30ca2405de6a20448ee557ab2cd61ab9c5900be7cbd18a2639db595f0b98/wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b", size = 87291 }, + { url = "https://files.pythonhosted.org/packages/49/4e/5d2f6d7b57fc9956bf06e944eb00463551f7d52fc73ca35cfc4c2cdb7aed/wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81", size = 90374 }, + { url = "https://files.pythonhosted.org/packages/a6/9b/c2c21b44ff5b9bf14a83252a8b973fb84923764ff63db3e6dfc3895cf2e0/wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9", size = 83896 }, + { url = "https://files.pythonhosted.org/packages/14/26/93a9fa02c6f257df54d7570dfe8011995138118d11939a4ecd82cb849613/wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c", size = 91738 }, + { url = "https://files.pythonhosted.org/packages/a2/5b/4660897233eb2c8c4de3dc7cefed114c61bacb3c28327e64150dc44ee2f6/wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc", size = 35568 }, + { url = "https://files.pythonhosted.org/packages/5c/cc/8297f9658506b224aa4bd71906447dea6bb0ba629861a758c28f67428b91/wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8", size = 37653 }, + { url = "https://files.pythonhosted.org/packages/ff/21/abdedb4cdf6ff41ebf01a74087740a709e2edb146490e4d9beea054b0b7a/wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", size = 23362 }, +] + +[[package]] 
+name = "xmltodict" +version = "0.14.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/50/05/51dcca9a9bf5e1bce52582683ce50980bcadbc4fa5143b9f2b19ab99958f/xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553", size = 51942 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/45/fc303eb433e8a2a271739c98e953728422fa61a3c1f36077a49e395c972e/xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac", size = 9981 }, +] + +[[package]] +name = "yarl" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/54/9c/9c0a9bfa683fc1be7fdcd9687635151544d992cccd48892dc5e0a5885a29/yarl-1.17.1.tar.gz", hash = "sha256:067a63fcfda82da6b198fa73079b1ca40b7c9b7994995b6ee38acda728b64d47", size = 178163 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/0f/ce6a2c8aab9946446fb27f1e28f0fd89ce84ae913ab18a92d18078a1c7ed/yarl-1.17.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cbad927ea8ed814622305d842c93412cb47bd39a496ed0f96bfd42b922b4a217", size = 140727 }, + { url = "https://files.pythonhosted.org/packages/9d/df/204f7a502bdc3973cd9fc29e7dfad18ae48b3acafdaaf1ae07c0f41025aa/yarl-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fca4b4307ebe9c3ec77a084da3a9d1999d164693d16492ca2b64594340999988", size = 93560 }, + { url = "https://files.pythonhosted.org/packages/a2/e1/f4d522ae0560c91a4ea31113a50f00f85083be885e1092fc6e74eb43cb1d/yarl-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff5c6771c7e3511a06555afa317879b7db8d640137ba55d6ab0d0c50425cab75", size = 91497 }, + { url = "https://files.pythonhosted.org/packages/f1/82/783d97bf4a226f1a2e59b1966f2752244c2bf4dc89bc36f61d597b8e34e5/yarl-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b29beab10211a746f9846baa39275e80034e065460d99eb51e45c9a9495bcca", size = 339446 }, + { url = "https://files.pythonhosted.org/packages/e5/ff/615600647048d81289c80907165de713fbc566d1e024789863a2f6563ba3/yarl-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a52a1ffdd824fb1835272e125385c32fd8b17fbdefeedcb4d543cc23b332d74", size = 354616 }, + { url = "https://files.pythonhosted.org/packages/a5/04/bfb7adb452bd19dfe0c35354ffce8ebc3086e028e5f8270e409d17da5466/yarl-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58c8e9620eb82a189c6c40cb6b59b4e35b2ee68b1f2afa6597732a2b467d7e8f", size = 351801 }, + { url = "https://files.pythonhosted.org/packages/10/e0/efe21edacdc4a638ce911f8cabf1c77cac3f60e9819ba7d891b9ceb6e1d4/yarl-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d216e5d9b8749563c7f2c6f7a0831057ec844c68b4c11cb10fc62d4fd373c26d", size = 343381 }, + { url = "https://files.pythonhosted.org/packages/63/f9/7bc7e69857d6fc3920ecd173592f921d5701f4a0dd3f2ae293b386cfa3bf/yarl-1.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:881764d610e3269964fc4bb3c19bb6fce55422828e152b885609ec176b41cf11", size = 337093 }, + { url = "https://files.pythonhosted.org/packages/93/52/99da61947466275ff17d7bc04b0ac31dfb7ec699bd8d8985dffc34c3a913/yarl-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:8c79e9d7e3d8a32d4824250a9c6401194fb4c2ad9a0cec8f6a96e09a582c2cc0", size = 346619 }, + { url = "https://files.pythonhosted.org/packages/91/8a/8aaad86a35a16e485ba0e5de0d2ae55bf8dd0c9f1cccac12be4c91366b1d/yarl-1.17.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:299f11b44d8d3a588234adbe01112126010bd96d9139c3ba7b3badd9829261c3", size = 344347 }, + { url = "https://files.pythonhosted.org/packages/af/b6/97f29f626b4a1768ffc4b9b489533612cfcb8905c90f745aade7b2eaf75e/yarl-1.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cc7d768260f4ba4ea01741c1b5fe3d3a6c70eb91c87f4c8761bbcce5181beafe", size = 350316 }, + { url = "https://files.pythonhosted.org/packages/d7/98/8e0e8b812479569bdc34d66dd3e2471176ca33be4ff5c272a01333c4b269/yarl-1.17.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:de599af166970d6a61accde358ec9ded821234cbbc8c6413acfec06056b8e860", size = 361336 }, + { url = "https://files.pythonhosted.org/packages/9e/d3/d1507efa0a85c25285f8eb51df9afa1ba1b6e446dda781d074d775b6a9af/yarl-1.17.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2b24ec55fad43e476905eceaf14f41f6478780b870eda5d08b4d6de9a60b65b4", size = 365350 }, + { url = "https://files.pythonhosted.org/packages/22/ba/ee7f1830449c96bae6f33210b7d89e8aaf3079fbdaf78ac398e50a9da404/yarl-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9fb815155aac6bfa8d86184079652c9715c812d506b22cfa369196ef4e99d1b4", size = 357689 }, + { url = "https://files.pythonhosted.org/packages/a0/85/321c563dc5afe1661108831b965c512d185c61785400f5606006507d2e18/yarl-1.17.1-cp311-cp311-win32.whl", hash = "sha256:7615058aabad54416ddac99ade09a5510cf77039a3b903e94e8922f25ed203d7", size = 83635 }, + { url = "https://files.pythonhosted.org/packages/bc/da/543a32c00860588ff1235315b68f858cea30769099c32cd22b7bb266411b/yarl-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:14bc88baa44e1f84164a392827b5defb4fa8e56b93fecac3d15315e7c8e5d8b3", size = 90218 }, + { url = "https://files.pythonhosted.org/packages/5d/af/e25615c7920396219b943b9ff8b34636ae3e1ad30777649371317d7f05f8/yarl-1.17.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:327828786da2006085a4d1feb2594de6f6d26f8af48b81eb1ae950c788d97f61", size = 141839 }, + { url = "https://files.pythonhosted.org/packages/83/5e/363d9de3495c7c66592523f05d21576a811015579e0c87dd38c7b5788afd/yarl-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cc353841428d56b683a123a813e6a686e07026d6b1c5757970a877195f880c2d", size = 94125 }, + { url = "https://files.pythonhosted.org/packages/e3/a2/b65447626227ebe36f18f63ac551790068bf42c69bb22dfa3ae986170728/yarl-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c73df5b6e8fabe2ddb74876fb82d9dd44cbace0ca12e8861ce9155ad3c886139", size = 92048 }, + { url = "https://files.pythonhosted.org/packages/a1/f5/2ef86458446f85cde10582054fd5113495ef8ce8477da35aaaf26d2970ef/yarl-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bdff5e0995522706c53078f531fb586f56de9c4c81c243865dd5c66c132c3b5", size = 331472 }, + { url = "https://files.pythonhosted.org/packages/f3/6b/1ba79758ba352cdf2ad4c20cab1b982dd369aa595bb0d7601fc89bf82bee/yarl-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:06157fb3c58f2736a5e47c8fcbe1afc8b5de6fb28b14d25574af9e62150fcaac", size = 341260 }, + { url = "https://files.pythonhosted.org/packages/2d/41/4e07c2afca3f9ed3da5b0e38d43d0280d9b624a3d5c478c425e5ce17775c/yarl-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1654ec814b18be1af2c857aa9000de7a601400bd4c9ca24629b18486c2e35463", size = 340882 }, + { url = "https://files.pythonhosted.org/packages/c3/c0/cd8e94618983c1b811af082e1a7ad7764edb3a6af2bc6b468e0e686238ba/yarl-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6595c852ca544aaeeb32d357e62c9c780eac69dcd34e40cae7b55bc4fb1147", size = 336648 }, + { url = "https://files.pythonhosted.org/packages/ac/fc/73ec4340d391ffbb8f34eb4c55429784ec9f5bd37973ce86d52d67135418/yarl-1.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:459e81c2fb920b5f5df744262d1498ec2c8081acdcfe18181da44c50f51312f7", size = 325019 }, + { url = "https://files.pythonhosted.org/packages/57/48/da3ebf418fc239d0a156b3bdec6b17a5446f8d2dea752299c6e47b143a85/yarl-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7e48cdb8226644e2fbd0bdb0a0f87906a3db07087f4de77a1b1b1ccfd9e93685", size = 342841 }, + { url = "https://files.pythonhosted.org/packages/5d/79/107272745a470a8167924e353a5312eb52b5a9bb58e22686adc46c94f7ec/yarl-1.17.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d9b6b28a57feb51605d6ae5e61a9044a31742db557a3b851a74c13bc61de5172", size = 341433 }, + { url = "https://files.pythonhosted.org/packages/30/9c/6459668b3b8dcc11cd061fc53e12737e740fb6b1575b49c84cbffb387b3a/yarl-1.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e594b22688d5747b06e957f1ef822060cb5cb35b493066e33ceac0cf882188b7", size = 344927 }, + { url = "https://files.pythonhosted.org/packages/c5/0b/93a17ed733aca8164fc3a01cb7d47b3f08854ce4f957cce67a6afdb388a0/yarl-1.17.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5f236cb5999ccd23a0ab1bd219cfe0ee3e1c1b65aaf6dd3320e972f7ec3a39da", size = 355732 }, + { url = "https://files.pythonhosted.org/packages/9a/63/ead2ed6aec3c59397e135cadc66572330325a0c24cd353cd5c94f5e63463/yarl-1.17.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a2a64e62c7a0edd07c1c917b0586655f3362d2c2d37d474db1a509efb96fea1c", size = 362123 }, + { url = "https://files.pythonhosted.org/packages/89/bf/f6b75b4c2fcf0e7bb56edc0ed74e33f37fac45dc40e5a52a3be66b02587a/yarl-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d0eea830b591dbc68e030c86a9569826145df485b2b4554874b07fea1275a199", size = 356355 }, + { url = "https://files.pythonhosted.org/packages/45/1f/50a0257cd07eef65c8c65ad6a21f5fb230012d659e021aeb6ac8a7897bf6/yarl-1.17.1-cp312-cp312-win32.whl", hash = "sha256:46ddf6e0b975cd680eb83318aa1d321cb2bf8d288d50f1754526230fcf59ba96", size = 83279 }, + { url = "https://files.pythonhosted.org/packages/bc/82/fafb2c1268d63d54ec08b3a254fbe51f4ef098211501df646026717abee3/yarl-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:117ed8b3732528a1e41af3aa6d4e08483c2f0f2e3d3d7dca7cf538b3516d93df", size = 89590 }, + { url = "https://files.pythonhosted.org/packages/06/1e/5a93e3743c20eefbc68bd89334d9c9f04f3f2334380f7bbf5e950f29511b/yarl-1.17.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5d1d42556b063d579cae59e37a38c61f4402b47d70c29f0ef15cee1acaa64488", size = 139974 }, + { url = "https://files.pythonhosted.org/packages/a1/be/4e0f6919013c7c5eaea5c31811c551ccd599d2fc80aa3dd6962f1bbdcddd/yarl-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c0167540094838ee9093ef6cc2c69d0074bbf84a432b4995835e8e5a0d984374", size = 93364 }, + { url = "https://files.pythonhosted.org/packages/73/f0/650f994bc491d0cb85df8bb45392780b90eab1e175f103a5edc61445ff67/yarl-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:2f0a6423295a0d282d00e8701fe763eeefba8037e984ad5de44aa349002562ac", size = 91177 }, + { url = "https://files.pythonhosted.org/packages/f3/e8/9945ed555d14b43ede3ae8b1bd73e31068a694cad2b9d3cad0a28486c2eb/yarl-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b078134f48552c4d9527db2f7da0b5359abd49393cdf9794017baec7506170", size = 333086 }, + { url = "https://files.pythonhosted.org/packages/a6/c0/7d167e48e14d26639ca066825af8da7df1d2fcdba827e3fd6341aaf22a3b/yarl-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d401f07261dc5aa36c2e4efc308548f6ae943bfff20fcadb0a07517a26b196d8", size = 343661 }, + { url = "https://files.pythonhosted.org/packages/fa/81/80a266517531d4e3553aecd141800dbf48d02e23ebd52909e63598a80134/yarl-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5f1ac7359e17efe0b6e5fec21de34145caef22b260e978336f325d5c84e6938", size = 345196 }, + { url = "https://files.pythonhosted.org/packages/b0/77/6adc482ba7f2dc6c0d9b3b492e7cd100edfac4cfc3849c7ffa26fd7beb1a/yarl-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f63d176a81555984e91f2c84c2a574a61cab7111cc907e176f0f01538e9ff6e", size = 338743 }, + { url = "https://files.pythonhosted.org/packages/6d/cc/f0c4c0b92ff3ada517ffde2b127406c001504b225692216d969879ada89a/yarl-1.17.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e275792097c9f7e80741c36de3b61917aebecc08a67ae62899b074566ff8556", size = 326719 }, + { url = "https://files.pythonhosted.org/packages/18/3b/7bfc80d3376b5fa162189993a87a5a6a58057f88315bd0ea00610055b57a/yarl-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:81713b70bea5c1386dc2f32a8f0dab4148a2928c7495c808c541ee0aae614d67", size = 345826 }, + { url = "https://files.pythonhosted.org/packages/2e/66/cf0b0338107a5c370205c1a572432af08f36ca12ecce127f5b558398b4fd/yarl-1.17.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:aa46dce75078fceaf7cecac5817422febb4355fbdda440db55206e3bd288cfb8", size = 340335 }, + { url = "https://files.pythonhosted.org/packages/2f/52/b084b0eec0fd4d2490e1d33ace3320fad704c5f1f3deaa709f929d2d87fc/yarl-1.17.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1ce36ded585f45b1e9bb36d0ae94765c6608b43bd2e7f5f88079f7a85c61a4d3", size = 345301 }, + { url = "https://files.pythonhosted.org/packages/ef/38/9e2036d948efd3bafcdb4976cb212166fded76615f0dfc6c1492c4ce4784/yarl-1.17.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:2d374d70fdc36f5863b84e54775452f68639bc862918602d028f89310a034ab0", size = 354205 }, + { url = "https://files.pythonhosted.org/packages/81/c1/13dfe1e70b86811733316221c696580725ceb1c46d4e4db852807e134310/yarl-1.17.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2d9f0606baaec5dd54cb99667fcf85183a7477f3766fbddbe3f385e7fc253299", size = 360501 }, + { url = "https://files.pythonhosted.org/packages/91/87/756e05c74cd8bf9e71537df4a2cae7e8211a9ebe0d2350a3e26949e1e41c/yarl-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b0341e6d9a0c0e3cdc65857ef518bb05b410dbd70d749a0d33ac0f39e81a4258", size = 359452 }, + { url = "https://files.pythonhosted.org/packages/06/b2/b2bb09c1e6d59e1c9b1b36a86caa473e22c3dbf26d1032c030e9bfb554dc/yarl-1.17.1-cp313-cp313-win32.whl", hash = "sha256:2e7ba4c9377e48fb7b20dedbd473cbcbc13e72e1826917c185157a137dac9df2", size = 308904 }, + { url = 
"https://files.pythonhosted.org/packages/f3/27/f084d9a5668853c1f3b246620269b14ee871ef3c3cc4f3a1dd53645b68ec/yarl-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:949681f68e0e3c25377462be4b658500e85ca24323d9619fdc41f68d46a1ffda", size = 314637 }, + { url = "https://files.pythonhosted.org/packages/52/ad/1fe7ff5f3e8869d4c5070f47b96bac2b4d15e67c100a8278d8e7876329fc/yarl-1.17.1-py3-none-any.whl", hash = "sha256:f1790a4b1e8e8e028c391175433b9c8122c39b46e1663228158e61e6f915bf06", size = 44352 }, +] + +[[package]] +name = "zipp" +version = "3.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", size = 9630 }, +]