diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..5fd19be
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,3 @@
+[flake8]
+#ignore = ...
+max-line-length=180
diff --git a/.github/workflows/.release-created.yml b/.github/workflows/.release-created.yml
new file mode 100644
index 0000000..d372132
--- /dev/null
+++ b/.github/workflows/.release-created.yml
@@ -0,0 +1,63 @@
+
+name: Release Branch Created
+
+# Run whenever a ref is created https://docs.github.com/en/actions/reference/events-that-trigger-workflows#create
+on:
+  create
+
+jobs:
+  # Bumps the minor version on develop whenever a release branch is created
+  bump:
+    name: Bump minor version on develop
+    # The type of runner that the job will run on
+    runs-on: ubuntu-latest
+    # Only run if the ref created was a release branch
+    if:
+      ${{ startsWith(github.ref, 'refs/heads/release/') }}
+    steps:
+      # Checks out the develop branch
+      - uses: actions/checkout@v4
+        with:
+          ref: 'refs/heads/develop'
+      - uses: actions/setup-python@v4
+        with:
+          python-version: '3.10'
+      - name: Install Poetry
+        uses: abatilo/actions-poetry@v2.0.0
+        with:
+          poetry-version: 1.3.2
+      - name: Bump minor version
+        env:
+          COMMIT_VERSION: ${{ github.ref }}
+        run: |
+
+          # only update the develop branch if we're making a #.#.0 release
+          # Get the branch name from the GITHUB_REF environment variable
+          branch_name=${GITHUB_REF#refs/heads/}
+
+          # Extract the version numbers from the branch name using a regular expression
+          if [[ $branch_name =~ /([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
+
+            first_number=${BASH_REMATCH[1]}
+            middle_number=${BASH_REMATCH[2]}
+            last_number=${BASH_REMATCH[3]}
+
+            # Increment the middle number by 1
+            incremented_middle_number=$((middle_number + 1))
+
+            # Check if the last number is '0'
+            if [ "$last_number" == "0" ]; then
+
+              update_version=$first_number.$incremented_middle_number.$last_number-alpha.1
+
+              poetry version $update_version
+              echo "software_version=$update_version" >> $GITHUB_ENV
+
+              git config --global user.name 'podaac-tig bot'
+              git config --global user.email 'podaac-tig@noreply.github.com'
+              # Use the shell variable here: values written to GITHUB_ENV are not
+              # visible to ${{ env.* }} expressions until the next step
+              git commit -am "/version $update_version"
+              git push
+
+            fi
+
+          fi
\ No newline at end of file
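For reference, the bash bump rule above, restated as a small Python sketch (illustrative only; this is not part of the workflow, it just mirrors the regex and arithmetic used in the run block):

```python
import re

def next_develop_version(branch_name):
    """Mirror of the rule above: creating release/X.Y.0 bumps develop to X.(Y+1).0-alpha.1."""
    match = re.search(r"/(\d+)\.(\d+)\.(\d+)$", branch_name)
    if not match:
        return None
    major, minor, patch = (int(n) for n in match.groups())
    if patch != 0:  # the workflow only updates develop for #.#.0 releases
        return None
    return f"{major}.{minor + 1}.{patch}-alpha.1"

assert next_develop_version("release/1.4.0") == "1.5.0-alpha.1"
assert next_develop_version("release/1.4.1") is None
```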
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..0a292f1
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,347 @@
+# Build Pipeline for hitide-backfill-tool
+name: Build
+# Controls when the workflow will run
+on:
+  # Triggers the workflow on push events
+  push:
+    branches: [ develop, release/**, main, feature/**, issue/**, issues/**, dependabot/** ]
+
+  # Allows you to run this workflow manually from the Actions tab
+  workflow_dispatch:
+
+env:
+  REGISTRY: ghcr.io
+  IMAGE_NAME: ${{ github.repository }}
+
+jobs:
+  # First job in the workflow installs and verifies the software
+  build:
+    name: Build, Test, Verify, Publish
+    # The type of runner that the job will run on
+    runs-on: ubuntu-latest
+    steps:
+      #########################################################################
+      # Environment Setup
+      #########################################################################
+      # NOTE: This step is platform-specific
+      # Checks out this repository and sets up the build/test environment with
+      # Python and Poetry
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+
+      - name: Install Poetry
+        uses: abatilo/actions-poetry@v3
+        with:
+          poetry-version: 1.8.1
+
+      #########################################################################
+      # Versioning (works around noisy poetry version output)
+      #########################################################################
+      # NOTE: This step is platform-specific
+      # Retrieve version information for use in the other versioning steps
+      - name: Get version
+        id: get-version
+        run: |
+          echo "the_service=${{ github.event.repository.name }}" >> $GITHUB_ENV
+          echo "the_env=$(printenv)" >> $GITHUB_ENV
+          echo "${{ github.event.repository.name }}"
+          echo "pyproject_name=$(poetry version | awk '{print $1}')" >> $GITHUB_ENV
+          poetry version > .temp_version.out
+          cat .temp_version.out
+          the_version=$(cat .temp_version.out |grep -v Downloading |grep -v '%' |sed -e 's/podaac-hitide-backfill-tool *//')
+          rm .temp_version.out
+          echo "old_version=$the_version" >> $GITHUB_ENV
+          echo "the_version=$the_version" >> $GITHUB_ENV
+          echo "Initial Version: $the_version"
+
+      # Pre-Alpha Logic - Use the project version number and add the short hash
+      # to it
+      - name: Bump pre-alpha version
+        # If triggered by push to a feature branch
+        if: |
+          startsWith(github.ref, 'refs/heads/issue') ||
+          startsWith(github.ref, 'refs/heads/dependabot/') ||
+          startsWith(github.ref, 'refs/heads/feature/')
+        # At pre-alpha, append the git commit to the version, set it into the
+        # project version, read the version out and set to build_service_version
+        run: |
+          the_version=$(echo "${{ env.the_version }}" | sed -e "s/-alpha.*//g")
+          the_version=$(echo "$the_version" | sed -e "s/-rc.*//g")
+          new_version="${the_version}+$(git rev-parse --short HEAD)"
+          echo "the_version=${new_version}" >> $GITHUB_ENV
+          echo "software_version=${new_version}" >> $GITHUB_ENV
+          echo "new_version=${new_version}" >> $GITHUB_ENV
+          echo "Github REF: ${{ github.ref }}"
+          echo "TARGET_ENV_UPPERCASE=SIT" >> $GITHUB_ENV
+
+      # Alpha Logic - Use the project version number and add -alpha.1 or bump
+      # the alpha number
+      - name: Bump alpha version
+        env:
+          VERSION: ${{ env.the_version }}
+        # If triggered by push to the develop branch
+        if: ${{ github.ref == 'refs/heads/develop' }}
+        run: |
+          if [[ ${VERSION} == *"-alpha"* ]]; then
+            alpha_number=$(echo "${VERSION}" | sed -e "s/^.*-alpha.//g")
+            alpha_number=$(echo "$alpha_number" | sed -e "s/-rc.*//g")
+            alpha_number=$((alpha_number+1))
+            the_version=$(echo "$the_version" | sed -e "s/-alpha.*//g")
+            the_version=$(echo "$the_version" | sed -e "s/-rc.*//g")
+            the_version="${the_version}-alpha.$alpha_number"
+            echo "software_version=${the_version}" >> $GITHUB_ENV
+            echo "the_version=${the_version}" >> $GITHUB_ENV
+          else
+            the_version="${{ env.the_version }}-alpha.1"
+            echo "software_version=${the_version}" >> $GITHUB_ENV
+            echo "the_version=${the_version}" >> $GITHUB_ENV
+          fi
+          echo "new_version=${the_version}" >> $GITHUB_ENV
+          echo "venue=sit" >> $GITHUB_ENV
+          echo "TARGET_ENV_UPPERCASE=SIT" >> $GITHUB_ENV
+
+      # Release Candidate Logic - Remove -alpha* and add -rc.1, or bump the rc
+      # number
+      - name: Bump rc version
+        if: ${{ startsWith(github.ref, 'refs/heads/release/') }}
+        env:
+          VERSION: ${{ env.the_version }}
+          COMMIT_VERSION: ${{ github.ref }}
+        run: |
+          commit_version=$COMMIT_VERSION
+          commit_version=$(echo "${commit_version}" |sed -e "s/^.*\///g")
+          commit_version=$(echo "${commit_version}" |sed -e "s/-alpha.*//g")
+          commit_version=$(echo "${commit_version}" |sed -e "s/-rc.*//g")
+          echo "COMMIT VERSION: $commit_version"
+          file_version=${VERSION}
+          file_version=$(echo "${file_version}" |sed -e "s/-alpha.*//g")
+          file_version=$(echo "${file_version}" |sed -e "s/-rc.*//g")
+          echo "FILE VERSION: $file_version"
+          if [[ "$commit_version" != "$file_version" ]]; then
+            echo "Commit version and file version are different, using commit version"
+            VERSION=$commit_version
+          fi
+          if [[ ${VERSION} == *"-rc"* ]]; then
+            echo "Bumping up the release candidate number from ${VERSION}"
+            rc_number=$(echo "${VERSION}" | sed -e "s/^.*-rc.//g")
+            rc_number=$(echo "${rc_number}" | sed -e "s/-alpha.*//g")
+            rc_number=$((rc_number+1))
+            the_version=$(echo "$the_version" | sed -e "s/-rc.*//g")
+            the_version=$(echo "$the_version" | sed -e "s/-alpha.*//g")
+            VERSION="${the_version}-rc.${rc_number}"
+          else
+            echo "Initializing the first release candidate for ${VERSION}"
+            VERSION=$(echo "${VERSION}" |sed -e "s/-alpha.*//g")
+            VERSION="${VERSION}-rc.1"
+          fi
+          echo "software_version=${VERSION}" >> $GITHUB_ENV
+          echo "the_version=${VERSION}" >> $GITHUB_ENV
+          echo "new_version=${VERSION}" >> $GITHUB_ENV
+          echo "venue=uat" >> $GITHUB_ENV
+          echo "TARGET_ENV_UPPERCASE=UAT" >> $GITHUB_ENV
+
+
+      # Release Logic
+      - name: Release version
+        # If triggered by push to the main branch
+        if: ${{ startsWith(github.ref, 'refs/heads/main') }}
+        env:
+          VERSION: ${{ env.the_version }}
+        # Remove -rc.* from end of version string
+        run: |
+          software_version=$(echo "${VERSION}" | sed -e s/-rc.*//g)
+          software_version=$(echo "${software_version}" | sed -e s/-alpha.*//g)
+          echo "software_version=$software_version" >> $GITHUB_ENV
+          echo "new_version=$software_version" >> $GITHUB_ENV
+          echo "the_version=$software_version" >> $GITHUB_ENV
+          echo "venue=ops" >> $GITHUB_ENV
+          echo "TARGET_ENV_UPPERCASE=OPS" >> $GITHUB_ENV
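+
+      # Net effect of the four versioning steps above (illustrative; N
+      # increments on each build, and <short-sha> is the current commit):
+      #   feature/* | issue/* | dependabot/*  ->  X.Y.Z+<short-sha>
+      #   develop                             ->  X.Y.Z-alpha.N
+      #   release/X.Y.Z                       ->  X.Y.Z-rc.N
+      #   main                                ->  X.Y.Z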
"$commit_version" != "$file_version" ]]; then + echo "Commit version and file version are different, using commit version" + VERSION=$commit_version + fi + if [[ ${VERSION} == *"-rc"* ]]; then + echo "Bumping up the release candidate number from ${VERSION}" + rc_number=$(echo "${VERSION}" | sed -e "s/^.*-rc.//g") + rc_number=$(echo "${rc_number}" | sed -e "s/-alpha.*//g") + rc_number=$((rc_number+1)) + the_version=$(echo "$the_version" | sed -e "s/-rc.*//g") + the_version=$(echo "$the_version" | sed -e "s/-alpha.*//g") + VERSION="${the_version}-rc.${rc_number}" + else + echo "Initializing the first release candidate for ${VERSION}" + VERSION=$(echo "${VERSION}" |sed -e "s/-alpha.*//g") + VERSION="${VERSION}-rc.1" + fi + echo "software_version=${VERSION}" >> $GITHUB_ENV + echo "the_version=${VERSION}" >> $GITHUB_ENV + echo "new_version=${VERSION}" >> $GITHUB_ENV + echo "venue=uat" >> $GITHUB_ENV + echo "TARGET_ENV_UPPERCASE=UAT" >> $GITHUB_ENV + + + # Release Logic + - name: Release version + # If triggered by push to the main branch + if: ${{ startsWith(github.ref, 'refs/heads/main') }} + env: + VERSION: ${{ env.the_version }} + # Remove -rc.* from end of version string + run: | + software_version=$(echo "${VERSION}" | sed -e s/-rc.*//g) + software_version=$(echo "${software_version}" | sed -e s/-alpha.*//g) + echo "software_version=$software_version" >> $GITHUB_ENV + echo "new_version=$software_version" >> $GITHUB_ENV + echo "the_version=$software_version" >> $GITHUB_ENV + echo "venue=ops" >> $GITHUB_ENV + echo "TARGET_ENV_UPPERCASE=OPS" >> $GITHUB_ENV + + + ######################################################################### + # Versioning Summary + ######################################################################### + - name: Versioning Summary + run: | + echo "the_service: ${{ env.the_service }}" + echo "old version : ${{ env.old_version }}" + echo "new version : ${{ env.new_version }}" + echo "the_env: ${{ env.the_env }}" + echo "software_version: ${{ env.software_version }}" + echo "GITHUB REF: ${{ github.ref }}" + echo "VENUE: ${{ env.venue }}" + echo "Target Env Uppercase: ${{ env.TARGET_ENV_UPPERCASE }}" + + original_env_value="${TARGET_ENV_UPPERCASE}" + lowercase_value=$(echo "${original_env_value}" | tr '[:upper:]' '[:lower:]') + echo "TARGET_ENV_LOWERCASE=${lowercase_value}" >> $GITHUB_ENV + + # NOTE: This step is platform-specific + # Update the version number in the application package itself + - name: Update version number in the application package + run: | + poetry version ${{ env.the_version }} + + + ######################################################################### + # Install & Test & Snyk + ######################################################################### + # NOTE: This step is platform-specific + # These are gradle-specific steps for installing the application + - name: Install Software + run: | + pip install pylint + pip install pytest + poetry build + poetry install + + # This is where tests go + - name: Run Poetry Tests + run: | + poetry run pylint podaac + poetry run flake8 podaac + poetry run pytest --junitxml=build/reports/pytest.xml --cov=podaac/ --cov-report=html -m "not aws and not integration" tests/ + + - name: Run Snyk as a blocking step + uses: snyk/actions/python@master + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + with: + command: test + args: > + --org=${{ secrets.SNYK_ORG_ID }} + --project-name=${{ github.repository }} + --severity-threshold=high + --fail-on=all + + - name: Run Snyk on Python + uses: 
+        uses: snyk/actions/python@master
+        env:
+          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
+        with:
+          command: monitor
+          args: >
+            --org=${{ secrets.SNYK_ORG_ID }}
+            --project-name=${{ github.repository }}
+
+
+      #########################################################################
+      # Publish new version numbers
+      #########################################################################
+
+      - name: Quick check for changes
+        id: check_changes
+        if: |
+          github.ref == 'refs/heads/develop' ||
+          github.ref == 'refs/heads/main' ||
+          startsWith(github.ref, 'refs/heads/release')
+        run: |
+          if [ -n "$(git status --porcelain)" ]; then
+            echo "changes=true" >> $GITHUB_OUTPUT
+          else
+            echo "changes=false" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Commit Version Bump
+        # If building develop, a release branch, or main then we commit the version bump back to the repo
+        if: steps.check_changes.outputs.changes == 'true'
+        run: |
+          git config user.name "${GITHUB_ACTOR}"
+          git config user.email "${GITHUB_ACTOR}@users.noreply.github.com"
+          git commit -am "/version ${{ env.the_version }}"
+          git push
+
+      - name: Push Tag
+        env:
+          VERSION: ${{ env.the_version }}
+        if: |
+          github.ref == 'refs/heads/develop' ||
+          github.ref == 'refs/heads/main' ||
+          startsWith(github.ref, 'refs/heads/release')
+        run: |
+          git config user.name "${GITHUB_ACTOR}"
+          git config user.email "${GITHUB_ACTOR}@users.noreply.github.com"
+          git tag -a "${VERSION}" -m "Version ${VERSION}"
+          git push origin "${VERSION}"
+
+
+      #########################################################################
+      # Publish to pypi.org
+      #########################################################################
+      - name: Publish to test.pypi.org
+        id: pypi-test-publish
+        if: |
+          github.ref == 'refs/heads/develop' ||
+          startsWith(github.ref, 'refs/heads/release')
+        env:
+          POETRY_PYPI_TOKEN_TESTPYPI: ${{secrets.TEST_PYPI_API_TOKEN}}
+        run: |
+          poetry config repositories.testpypi https://test.pypi.org/legacy/
+          poetry publish -r testpypi
+
+      - name: Publish to pypi.org
+        if: ${{ github.ref == 'refs/heads/main' }}
+        id: pypi-publish
+        env:
+          POETRY_PYPI_TOKEN_PYPI: ${{secrets.PYPI_API_TOKEN}}
+        run: |
+          poetry publish --skip-existing
+
+      ## Due to observed delays between upload and availability, wait for the package to become available
+      - name: Wait for package
+        if: |
+          steps.pypi-test-publish.conclusion == 'success' ||
+          steps.pypi-publish.conclusion == 'success'
+        run: |
+          pip install tenacity  # logging is in the standard library and need not be installed
+          python3 ${GITHUB_WORKSPACE}/.github/workflows/wait-for-pypi.py ${{env.pyproject_name}}==${{ env.software_version }}
+
+
+      #########################################################################
+      # Deploy to AWS via Terraform
+      #########################################################################
+      - name: Deploy Env Override
+        if: |
+          github.event.head_commit.message == '/deploy sit' ||
+          github.event.head_commit.message == '/deploy uat'
+        run: |
+          message="${{ github.event.head_commit.message }}"
+          trimmed_message=${message:1}  # Remove leading slash
+          override_env=$(echo "$trimmed_message" | grep -oE '[^[:space:]]+$')
+          override_env_upper=$(echo "$trimmed_message" | awk '{print toupper($NF)}')
+          echo "THE_ENV=${override_env}" >> $GITHUB_ENV
+          echo "TARGET_ENV_LOWERCASE=${override_env}" >> $GITHUB_ENV
+          echo "TARGET_ENV_UPPERCASE=${override_env_upper}" >> $GITHUB_ENV
+
+      - uses: hashicorp/setup-terraform@v3
+        with:
+          terraform_version: 1.5.3
+
+      - name: Deploy Terraform
+        if: |
+          github.ref == 'refs/heads/develop' ||
+          github.ref == 'refs/heads/main' ||
+          
startsWith(github.ref, 'refs/heads/release') || + github.event.head_commit.message == '/deploy sit' || + github.event.head_commit.message == '/deploy uat' + working-directory: terraform-deploy/ + env: + AWS_ACCESS_KEY_ID: ${{ secrets[format('AWS_ACCESS_KEY_ID_SERVICES_{0}', env.TARGET_ENV_UPPERCASE)] }} + AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_SERVICES_{0}', env.TARGET_ENV_UPPERCASE)] }} + AWS_ACCOUNT_ID: ${{ secrets[format('AWS_ACCOUNT_ID_SERVICES_{0}', env.TARGET_ENV_UPPERCASE)] }} + AWS_DEFAULT_REGION: us-west-2 + + TF_VAR_permissions_boundary_arn: ${{ secrets[format('PERMISSIONS_BOUNDARY_ARN_{0}', env.TARGET_ENV_UPPERCASE)] }} + TF_VAR_buckets_name: ${{ secrets[format('BUCKET_{0}', env.TARGET_ENV_UPPERCASE)] }} + TF_VAR_system_bucket: ${{ secrets[format('SYSTEM_BUCKET_{0}', env.TARGET_ENV_UPPERCASE)] }} + TF_VAR_dmrpp_url: ${{ secrets.DMRPP_URL }} + TF_VAR_aws_security_group_ids: ${{ secrets[format('SECURITY_GROUP_IDS_{0}', env.TARGET_ENV_UPPERCASE)] }} + + run: | + curl -L -o metadata-aggregator.zip https://github.com/podaac/cumulus-metadata-aggregator/releases/download/v8.6.0/cumulus-metadata-aggregator-8.6.0.zip + python3 override.py "${{ env.TARGET_ENV_LOWERCASE }}" + source bin/config.sh ${{ env.TARGET_ENV_LOWERCASE }} + terraform plan -var-file=tfvars/"${{ env.TARGET_ENV_LOWERCASE }}".tfvars -var="app_version=${{ env.the_version }}" -out="tfplan" + terraform apply -auto-approve tfplan diff --git a/.github/workflows/wait-for-pypi.py b/.github/workflows/wait-for-pypi.py new file mode 100644 index 0000000..0433f52 --- /dev/null +++ b/.github/workflows/wait-for-pypi.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +import sys +import tempfile +import logging +import subprocess + +import tenacity + +''' +Sometimes the package published to PyPi is not immediately available for download from the index. This script +simply repeatedly tries to download a specific version of a package from PyPI (or test.pypi) until it succeeds or +a limit is exceeded. +''' + + +@tenacity.retry( + wait=tenacity.wait_exponential(multiplier=1, min=4, max=10), + retry=tenacity.retry_if_exception_type(subprocess.CalledProcessError), + stop=tenacity.stop_after_delay(120), + before_sleep=tenacity.before_sleep_log(logging.getLogger(__name__), logging.DEBUG) +) +def download_package(package): + subprocess.check_call([sys.executable, '-m', + 'pip', '--isolated', '--no-cache-dir', + 'download', '--no-deps', '-d', tempfile.gettempdir(), '--index-url', + 'https://pypi.org/simple/', + '--extra-index-url', 'https://test.pypi.org/simple/', package + ]) + + +if __name__ == '__main__': + logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) + package_spec = sys.argv[1] + download_package(package_spec) \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7497dfd --- /dev/null +++ b/.gitignore @@ -0,0 +1,149 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +.idea +*.pyc +dist +*.egg-info +*.terraform +*.terraform.d +tfplan +.DS_Store +**__pycache__ + +functional_tests/*.log +functional_tests/*.txt + +.bash_history + +build-info.yaml +result_images +requirements.txt +snyk-report.json diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..43a9443 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,645 @@ +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint +# in a server-like mode. +clear-cache-post-run=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. +#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +extension-pkg-whitelist= + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold under which the program will exit with error. 
+fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. +ignore=CVS + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked and +# will not be imported (useful for modules/projects where namespaces are +# manipulated during runtime and thus existing member attributes cannot be +# deduced by static analysis). It supports qualified module names, as well as +# Unix pattern matching. +ignored-modules= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Resolve imports to .pyi stubs if available. May reduce no-member messages and +# increase not-an-iterable messages. +prefer-stubs=no + +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.10 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. 
+bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +#class-attribute-rgx= + +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. 
+#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. +#variable-rgx= + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + asyncSetUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=builtins.BaseException,builtins.Exception + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow explicit reexports by alias from a package __init__. +allow-reexport-from-package=no + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. 
+deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + use-implicit-booleaness-not-comparison-to-string, + use-implicit-booleaness-not-comparison-to-zero, + broad-exception-raised + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable= + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. 
+never-returning-functions=sys.exit,argparse.parse_error + +# Let 'consider-using-join' be raised when the separator to join on would be +# non-empty (resulting in expected fixes of the type: ``"- " + " - +# ".join(items)``) +suggest-join-with-non-empty-separator=yes + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are: text, parseable, colorized, +# json2 (improved json format), json (old json format) and msvs (visual +# studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work. +spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. 
+generated-members= + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of names allowed to shadow builtins +allowed-redefined-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..866722b --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,238 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+
+## [Unreleased]
+
+### Added
+### Deprecated
+### Removed
+### Fixed
+
+
+## [0.9.0]
+
+### Added
+- Updated regression and memory profiling scripts
+- Updated CLI preview message to note that the messages sent count might not match the actual execution count
+- Moved repo to Github.com [hitide-backfill-tool](https://github.com/podaac/hitide-backfill-tool)
+  - Updated cumulus cluster template
+  - Made default_message_config.json an argument file
+  - Implemented Github Actions to Build and Deploy to AWS
+### Deprecated
+### Removed
+### Fixed
+
+
+## [0.8.1]
+
+### Added
+- Updated to use Github.com [hitide-backfill-lambdas](https://github.com/podaac/hitide-backfill-lambdas) repo
+- Updated cluster settings
+### Deprecated
+### Removed
+- Removed using hitide-backfill-post-step and hitide-backfill-sqs-to-step
+### Fixed
+
+
+## [0.8.0]
+
+### Added
+- Updated to terraform 1.5.3
+- Added Dmrpp Lambda and update to 5.0.1
+- Updated tig to 0.12.0 to allow terraform 1.5.3 update
+- Updated forge to 0.11.0 to allow terraform 1.5.3 update
+- Updated post to cmr and hyrax metadata updates lambda modules from cumulus 18.2.0
+- Updated postworkflow normalizer; it now computes an ecs_lambda workflow flag to help determine whether to use Lambda or ECS based on granule size
+- Updated to use Github.com [cumulus-postworkflow-normalizer](https://github.com/podaac/cumulus-postworkflow-normalizer) repo
+### Deprecated
+### Removed
+### Fixed
+
+
+## [0.7.0]
+
+### Added
+- Added forge-py to generate footprints for specific collections
+- Update to deploy
+### Deprecated
+### Removed
+### Fixed
+
+
+## [0.6.0]
+
+### Added
+- Updated tig to 0.11.0
+- Updated to have a regression test to call backfilling on all forge/tig collections
+- Added a sort order for cmr based on date
+- Changed tig memory to 3.5 gb
+- Added in a memory profiler for lambda functions, mainly for tig
+- Updated message_visibility_timeout to 18000
+### Deprecated
+### Removed
+### Fixed
+
+
+## [0.5.1]
+
+### Added
+- Updated tig to 0.10.0
+### Deprecated
+### Removed
+### Fixed
+
+
+## [0.5.0]
+
+### Added
+- Updated tig to 0.9.0
+- Updated forge 0.10.0
+- Updated backfill-post to 0.3.0
+- Updated backfill-sqs-to-step to 0.3.0
+- Updated how tig, forge, backfill_post_step, and backfill_sqs_to_step are deployed to each environment with override variables
+- Added output granule count with both footprint and bbox
+- Renamed monthly_counts to monthly_results; it now returns the full granule object instead of just the count
+- Updated python dependency libraries
+### Deprecated
+### Removed
+### Fixed
+
+
+## [0.4.1]
+
+### Added
+- Updated service uat disk space from 100 GB to 600 GB
+- Updated number of ec2 to 2 for dmrpp
+- Added a replay script to move messages from the dead letter queue to the regular queue
+- Updated ec2 servers to 50 t3.micro instances with 50 gb each
+- Updated python libraries
+- Updated tig fargate memory to 16 gb
+- Updated to cumulus 16.1.2
+- Updated CMA layer to 2.0.3
+- Updated forge 0.9.0 and tig 0.8.0
+- Updated library versions to latest to fix security issues
+- Added cumulus normalizer to filter out unnecessary data for forge tig dmrpp workflows
+### Deprecated
+### Removed
+### Fixed
+
+## [0.4.0]
+
+### Added
+- Updated the cmr request to retry on failures
+- Updated Jenkins docker image to a snyk base image
+- Added output granule concept IDs for missing images, footprints, and dmrpps
+- Updated forge 0.8.0 and tig 0.7.0
+- Updated library versions to latest to fix security issues
+### Deprecated
+### Removed
+### Fixed
+
+## [0.3.4]
+
+### Added
+- Fix dmrpp workflow to keep revision id
+- Add cycle parameter to cli
+- Updated dmrpp-generator v4.0.9
+- Update tig to public 0.6.2
+- Update forge to public 0.7.5
+- Add a monthly count totals table output by Year-Month
+### Deprecated
+### Removed
+### Fixed
+
+
+## [0.3.3]
+
+### Added
+- **PODAAC-5418**
+  - Add ability to backfill swot collections
+- **PODAAC-5320**
+  - Add timeout to forge and tig fargate steps
+- **PODAAC-5321**
+  - Allow tig and forge fargate to scale to 100
+- **PODAAC-5280**
+  - Check forge/tig configuration before backfilling a collection to make sure we can generate footprints
+- **update aggregator**
+  - Updated cumulus-metadata-aggregator v8.4.0
+- **PODAAC-5274**
+  - update post step lambda to make raw sql query count
+- **update_ebs**
+  - turn on docker debug mode
+  - update ops ebs to 600 gb
+  - change ops running container to 50
+  - update jenkins to deploy to ops on changelog
+- **update dmrpp-generator**
+  - Updated dmrpp-generator v4.0.7
+- **update tig and forge**
+  - Updated tig v0.5.0
+  - Updated forge v0.7.0
+### Deprecated
+### Removed
+### Fixed
+### Security
+
+
+## [0.3.2]
+
+### Added
+- **update_ebs**
+  - update ebs volume type to gp3 and ops size to 400gb
+### Deprecated
+### Removed
+### Fixed
+### Security
+
+
+## [0.3.1]
+
+### Added
+- **PODAAC-5126**
+  - Added DMRPP file generation and CMR OpenDAP URL update capability
+- **Fargate Changes**
+  - Added fargate terraform resources, added in forge and tig fargate
+- **PODAAC-5128**
+  - Add DMRPP workflow.
+- **PODAAC-5143**
+  - added ECS facility for docker image to run upon
+- **PODAAC-5229**
+  - update post lambda to process dmrpp workflows
+### Deprecated
+### Removed
+### Fixed
+### Security
+
+
+## [0.2.0]
+
+### Added
+- **PODAAC-4881**
+  - Add script failed_workflow.py to find unique errors in step function workflows
+- **Update jenkins**
+  - Update jenkins branch to have deploy sit and uat triggers
+  - Now using tig v0.4.0, forge v0.5.1, and cumulus-metadata-aggregator v8.1.0
+### Deprecated
+### Removed
+### Fixed
+### Security
+
+
+## [0.1.0]
+
+### Added
+- **PODAAC-4425**
+  - Initial development of cli tool
+  - Includes backfill and create-backfill-config scripts
+- **PODAAC-4424**
+  - Implementation of hitide backfill terraform infrastructure.
+
+### Deprecated
+### Removed
+### Fixed
+- **PODAAC-4771**
+  - Properly parses s3 bucket information from s3 urls that
+    contain multiple directories in the path.
+### Security
diff --git a/LICENSE b/LICENSE
index 261eeb9..218c4a7 100644
--- a/LICENSE
+++ b/LICENSE
@@ -186,7 +186,7 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.
 
-   Copyright [yyyy] [name of copyright owner]
+   Copyright 2024 California Institute of Technology
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
diff --git a/README.md b/README.md
index bc71ed6..fa06588 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,54 @@
-# hitide-backfill-tool
\ No newline at end of file
+# hitide-backfill-tool
+
+Tool to backfill thumbnail images and footprints for POCLOUD datasets
+
+Some granules have been ingested without creating footprints/thumbnail images. The purpose of this tool is to trigger part of the Cumulus workflow to generate footprints and images for granules that need them.
+
+## What it does in a nutshell
+
+- You specify search parameters at the command line (collection, start_date, end_date, footprint, image, etc.)
+- Backfill-Tool searches CMR for matching granules
+- Backfill-Tool figures out if the granule needs a footprint or image
+- If footprint or image generation is needed, Backfill-Tool creates a Cumulus message and sends it to an AWS SNS topic.
+- From there, another service will trigger Forge/TIG and update CMR with new images/footprints as needed
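+
+An example invocation (script and flag names as defined in podaac/hitide_backfill_tool/args.py; the collection, dates, and config path are illustrative, and the config file must supply default_message_config, which the tool requires):
+
+- ex: backfill --config backfill_config.yaml --cmr ops -c MY_COLLECTION_SHORT_NAME -sd 2021-01-01T00:00:00Z -ed 2021-12-31T00:00:00Z --image on --footprint on --preview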
+## Prerequisites
+
+- Python >= 3.10
+- poetry
+
+## failed_workflow.py
+
+- Script used to scan failed workflows and get unique errors
+- Takes in three arguments
+  - workflow_arn: arn of the aws workflow
+  - profile_name: aws profile name credential to use
+  - limit: how many of the latest executions to scan; if not specified, all failed executions are scanned
+- ex: python failed_workflow.py --workflow_arn arn:aws:states:us-west-2:123456:stateMachine:podaac-services-ops-hitide-backfill-forge --profile_name service_ops --limit 1000
+
+
+## replay.py
+- Script used to get messages off the dead letter queue and back into the regular queue
+- Takes one argument
+  - config: configuration file that has the aws_profile, dlq_url, and sqs_url
+- ex: replay --config config.cfg
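+
+A sketch of such a config (the exact file format is whatever replay.py parses; the keys below are the ones named above, and the values are illustrative):
+  - aws_profile: my-aws-profile
+  - dlq_url: https://sqs.us-west-2.amazonaws.com/000000000000/my-dead-letter-queue
+  - sqs_url: https://sqs.us-west-2.amazonaws.com/000000000000/my-queue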
+
+## regression.py
+- Script to run the backfill tool command on every collection that has a forge-tig configuration file
+- Script can be modified to exclude or test specific collections
+
+## memory_profiler.py
+- Script to profile the memory use of lambdas; currently only tig is being profiled
+- Lambdas need to be modified to include the lambda request id in cloudwatch logs
+- Modify the script with the cloudwatch lambda to profile
+- Modify the script to include the start time and end time range where cloudwatch events were logged
+
+## ECS facility
+
+- ECS template to start docker: ecs_cluster_instance_autoscaling_cf_template.yml.tmpl
+- ECS script to execute task: task-reaper.sh
+- All ECS related resources are specified in ecs_cluster.tf
+- ECS is a cluster of EC2 instances. While creating the EC2 instances, a key is given to create each EC2 instance, and the key name is specified as the key_name variable within variables.tf. At this moment, the following keys are specified for each environment:
+  - backfill-tool-sit-cluster-keypair (SIT)
+  - backfill-tool-uat-cluster-keypair (UAT)
+  - backfill-tool-ops-cluster-keypair (OPS)
\ No newline at end of file
diff --git a/podaac/hitide_backfill_tool/__init__.py b/podaac/hitide_backfill_tool/__init__.py
new file mode 100644
index 0000000..f9f97e9
--- /dev/null
+++ b/podaac/hitide_backfill_tool/__init__.py
@@ -0,0 +1,5 @@
+"""Modules for searching granules, creating & sending messages for those
+granules, and coordinating the whole process.
+"""
+
+__version__ = '0.1.0'
diff --git a/podaac/hitide_backfill_tool/args.py b/podaac/hitide_backfill_tool/args.py
new file mode 100644
index 0000000..5c324a9
--- /dev/null
+++ b/podaac/hitide_backfill_tool/args.py
@@ -0,0 +1,105 @@
+"""Parse cli args"""
+
+import sys
+from argparse import ArgumentParser, Namespace
+from podaac.hitide_backfill_tool.file_util import load_yaml_file
+
+
+def merge_dicts(defaults, config, cli):
+    """Return dict merging default config values, config file values and cli args.
+
+    CLI values take precedence over config file values, which take precedence
+    over defaults; a value of None means "not provided" at that level.
+    """
+    keys = {**cli, **config, **defaults}.keys()
+    output = {}
+    for key in keys:
+        if cli.get(key) is not None:
+            output[key] = cli[key]
+        elif config.get(key) is not None:
+            output[key] = config[key]
+        else:
+            output[key] = defaults.get(key)
+    return output
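+
+# Illustration of the precedence above (comments only, not executed):
+#   merge_dicts({"image": "on"}, {"image": "off"}, {"image": None}) -> {"image": "off"}
+#   merge_dicts({"image": "on"}, {}, {"image": "force"}) -> {"image": "force"}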
+
+
+default_config = {
+    "footprint": "on",
+    "image": "on",
+    "dmrpp": "off",
+    "dmrpp_min_version": "3.21.0-272",  # Important: Update this version when updating the
+                                        # backend dmrpp_generator
+    "preview": False,
+    "use_data_url": False,
+    "page_size": 2000,
+    "geometries": ["GPolygons", "Lines"],
+    "log_level": "INFO"
+}
+
+
+def create_parser():
+    """Create an argparse parser for the backfill cli"""
+
+    parser = ArgumentParser()
+    parser.add_argument("--config")
+
+    parser.add_argument("--cmr", choices=["ops", "uat", "sit"])
+    parser.add_argument("-c", "--collection")
+    parser.add_argument("--provider")
+    parser.add_argument("-sd", "--start-date")
+    parser.add_argument("-ed", "--end-date")
+    parser.add_argument("--page-size", type=int)
+    parser.add_argument("--page-limit", type=int)
+    parser.add_argument("--edl-token")
+    parser.add_argument("--launchpad-token")
+    parser.add_argument("--cmr-search-after")
+
+    parser.add_argument("-g", "--geometry", dest="geometries",
+                        action="append", default=None)
+    parser.add_argument("--footprint", choices=["on", "off", "force"])
+    parser.add_argument("--image", choices=["on", "off", "force"])
+    parser.add_argument("--dmrpp", choices=["on", "off", "force"])
+    parser.add_argument("--dmrpp-min-version")
+    parser.add_argument("--use-data-url", action="store_true", default=None)
+
+    parser.add_argument("--cumulus", choices=["ops", "uat", "sit",
+                                              "swot-sit", "swot-uat", "swot-ops"])
+    parser.add_argument("--cumulus-configurations")
+
+    parser.add_argument("--preview", action="store_true", default=None)
+    parser.add_argument("--sns-arn")
+    parser.add_argument("--aws-profile")
+    parser.add_argument("--message_file")
+    parser.add_argument("--message-limit", type=int)
+    parser.add_argument("--user")
+
+    parser.add_argument("--log-file")
+    parser.add_argument("--log-level")
+
+    parser.add_argument('--cycles', type=str, help='List of cycles or a single cycle', default=None)
+    parser.add_argument('--sort-order', type=str, help="cmr search start date sorting order",
+                        choices=["descending", "ascending"])
+
+    parser.add_argument("--default_message_config", type=str,
+                        help="default message config used to construct messages", default=None)
+
+    return parser
+
+
+def parse_args(args=None):
+    """Return argparse namespace with merged config values (defaults + config_file + cli_args).
+
+    Args are parsed from an input string, a string array, or, if neither is provided, sys.argv."""
+
+    if args is None:
+        args = sys.argv[1:]
+    elif isinstance(args, str):
+        args = args.split()
+
+    parser = create_parser()
+    args = parser.parse_args(args)
+    config = {}
+    if args.config:
+        config = load_yaml_file(args.config)
+
+    args = vars(args)
+    merged_dict = merge_dicts(default_config, config, args)
+    merged_config = Namespace(**merged_dict)
+
+    if merged_config.default_message_config is None:
+        raise Exception("Please specify a path to the default message config")
+
+    return merged_config
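For reference, the file passed via `--config` is YAML (it is read with `load_yaml_file`) whose keys mirror the parser's dest names above. A sketch with illustrative values; only `default_message_config` is strictly required by `parse_args`:

```yaml
cmr: uat
collection: MY_COLLECTION_SHORT_NAME
start_date: "2021-01-01T00:00:00Z"
end_date: "2021-12-31T00:00:00Z"
footprint: "on"
image: "on"
preview: true
aws_profile: my-aws-profile
sns_arn: arn:aws:sns:us-west-2:000000000000:my-topic
default_message_config: ./default_message_config.json
```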
diff --git a/podaac/hitide_backfill_tool/cli.py b/podaac/hitide_backfill_tool/cli.py
new file mode 100644
index 0000000..97027e4
--- /dev/null
+++ b/podaac/hitide_backfill_tool/cli.py
@@ -0,0 +1,526 @@
+"""Script for backfilling granule images and footprints"""
+
+# pylint: disable=line-too-long
+
+from concurrent.futures import ThreadPoolExecutor
+from multiprocessing import Lock
+import logging
+import sys
+import uuid
+import copy
+import json
+from datetime import datetime, timezone
+import requests
+
+from podaac.hitide_backfill_tool.cmr.search import GranuleSearch
+from podaac.hitide_backfill_tool.cmr.cmr_granule import CmrGranule
+from podaac.hitide_backfill_tool.cmr.helpers import cmr_base_url
+from podaac.hitide_backfill_tool.cnm_message_writer import CnmMessageWriter
+from podaac.hitide_backfill_tool.sns_message_sender import SnsMessageSender, FileMessageSender
+from podaac.hitide_backfill_tool.config import get_collection_config, get_message_config
+from podaac.hitide_backfill_tool.args import parse_args
+from podaac.hitide_backfill_tool.file_util import make_absolute
+from podaac.hitide_backfill_tool.dmrpp_utils import parse_version, DmrppState
+from podaac.hitide_backfill_tool.s3_reader import S3Reader
+
+
+def logger_from_args(args):
+    """Return configured logger from parsed cli args."""
+
+    if args.log_file:
+        logging.basicConfig(filename=make_absolute(args.log_file))
+    logger = logging.getLogger("backfill")
+    logger.setLevel(getattr(logging, args.log_level))
+    logger.addHandler(logging.StreamHandler(sys.stdout))
+    return logger
+
+
+def object_to_str(obj):
+    """Return formatted string, given a python object."""
+
+    vars_dict = vars(obj)
+    vars_string = ""
+    for key in vars_dict.keys():
+        vars_string += f" {key} -> {vars_dict[key]}\n"
+    return vars_string
+
+
+def safe_log_args(logger, args):
+    """Log the parsed cli args object without showing tokens."""
+
+    args_copy = copy.copy(args)
+    if args_copy.edl_token:
+        args_copy.edl_token = "********"
+    if args_copy.launchpad_token:
+        args_copy.launchpad_token = "********"
+    logger.debug(f"\nCLI args:\n{object_to_str(args_copy)}\n")
+
+
+def granule_search_from_args(args, logger):
+    """Return configured GranuleSearch object from parsed cli args and logger."""
+
+    return GranuleSearch(
+        base_url=cmr_base_url(args.cmr),
+        collection_short_name=args.collection,
+        provider=args.provider,
+        start_date=args.start_date,
+        end_date=args.end_date,
+        page_size=args.page_size,
+        page_limit=args.page_limit,
+        logger=logger,
+        edl_token=args.edl_token,
+        launchpad_token=args.launchpad_token,
+        cmr_search_after=args.cmr_search_after,
+        cycles=args.cycles,
+        sort_order=args.sort_order
+    )
get_message_config(args.cumulus, args.default_message_config) + collection_config = get_collection_config( + args.cumulus_configurations, args.collection, args.cumulus, logger) + message_writer = CnmMessageWriter(message_config, collection_config, + args.start_date, args.end_date, args.provider, args.cli_execution_id, args.user) + return message_writer + + +def message_senders_from_args(args, logger): + """Return list of configured message senders from parsed cli args and logger.""" + + message_senders = [] + + if args.message_file and not args.preview: + file_message_sender = FileMessageSender(args.message_file) + message_senders.append(file_message_sender) + if args.sns_arn and not args.preview: + message_senders.append(SnsMessageSender( + topic_arn=args.sns_arn, + aws_profile=args.aws_profile, + logger=logger + )) + return message_senders + + +def granule_options_from_args(args): + """Return kwargs dict will be passed to CmrGranule constructor along with granule umm_json.""" + + return { + "footprint_geometries": args.geometries, + "footprint_processing": args.footprint, + "image_processing": args.image, + "dmrpp_processing": args.dmrpp, + "dmrpp_min_version": parse_version(args.dmrpp_min_version), + "can_use_data_url_for_s3_bucket_info": args.use_data_url + } + + +class Backfiller: + """Perform a backfill operation""" + + # Disable broad-except since many types of error indicate the absense + # of data when attempting access (e.g. TypeError, IndexError, KeyError, ...) + # pylint: disable=broad-except + + # pylint: disable=too-many-instance-attributes,too-many-arguments + + def __init__(self, search, message_writer, message_senders, granule_options, logger, + message_limit, cli_execution_id, s3, collection): + # pylint: disable=C0103 + + # dependencies + self.search = search + self.message_writer = message_writer + self.message_senders = message_senders + self.granule_options = granule_options + self.logger = logger + self.message_limit = message_limit + self.cli_execution_id = cli_execution_id + self.s3 = s3 + self.collection = collection + + # statistics + self.granules_analyzed = 0 + self.granule_range_start = None + self.granule_range_end = None + self.footprints_that_couldnt_be_processed = 0 + self.images_that_couldnt_be_processed = 0 + self.dmrpp_that_couldnt_be_processed = 0 + self.granules_needing_footprint = 0 + self.granules_needing_image = 0 + self.granules_needing_dmrpp = 0 + self.granules_with_footprint_and_bbox = 0 + self.footprint_messages_sent = 0 + self.image_messages_sent = 0 + self.dmrpp_messages_sent = 0 + self.monthly_results = {} + self.concept_ids_needing_image = [] + self.concept_ids_needing_footprint = [] + self.concept_ids_needing_dmrpp = [] + + # dmrpp status + self.dmrpp_unprocessed = 0 + self.dmrpp_missing_version = 0 + self.dmrpp_update_cmr_opendap = 0 + self.dmrpp_older_version = 0 + self.dmrpp_newer_version = 0 + + # forge-tig configuration + self.forge_tig_configuration = None + + # destination_message used in logging + destination_message = [] + for message_sender in message_senders: + destination_message.append(message_sender.name) + if len(destination_message) == 0: + destination_message.append('nowhere') + self.destination_message = f"Messages being sent to {', '.join(destination_message)}" + + # for thread-safe operations + self.lock = Lock() + + def process_granules(self): + """Loop through granules (in parallel) from granule-search and call the process_one_granule() method.""" + + while self.search.get_next_page(): + print("Processing 
granules...", end='', flush=True) + with ThreadPoolExecutor() as executor: + executor.map(self.process_one_granule, self.search.granules()) + print("done.") + if self.message_limit_reached(): + self.logger.info("\n**** Message limit reached ****") + return + self.log_stats() + + def print_monthly_results_table(self): + """Function to print out monthly stats""" + + if not self.message_senders: + print("** NOTE: When in preview mode, the messages sent count may not be accurate since it's only simulating sending messages. ** \n") + + print("Monthly Counts Summary:\n") + header = f"{'Date':<10} {'Granules':<10} {'Need Image':<12} {'Need Footprint':<16} {'Both FP & BBox':<16} {'Need DMRPP':<12}" + + print(header) + + current_year = None + + separator = "========== ========== ============ ================ ================ ============" + + for date, result in self.monthly_results.items(): + date_obj = datetime.strptime(date, "%Y-%m") + month_name = date_obj.strftime("%b") + + # Check if the year has changed + year = date_obj.year + if year != current_year: + print(separator) + current_year = year + + row = f"{date[:4]}-{month_name:<5} {len(result['granules']):<10} {result['needs_image']:<12} {result['needs_footprint']:<16} {result['both_footprint_and_bbox']:<16} {result['needs_dmrpp']:<12}" # noqa + print(row) + print() + + def process_one_granule(self, umm_granule): + """Create and send messages for one granule. Thread-safe method using lock.""" + try: + if self.message_limit_reached(): + return + + granule = CmrGranule(umm_granule, self.s3, **self.granule_options) + + with self.lock: + if not self.granule_range_start: + self.granule_range_start = granule.start_date() + self.granule_range_end = granule.end_date() + + date = granule.start_date()[:7] + with self.lock: + if date not in self.monthly_results: + self.monthly_results[date] = { + 'granules': [copy.deepcopy(granule.umm_granule)], + 'needs_image': 0, + 'needs_footprint': 0, + 'both_footprint_and_bbox': 0, + 'needs_dmrpp': 0 + } + else: + self.monthly_results[date]['granules'].append(copy.deepcopy(granule.umm_granule)) + + # footprint + if granule.needs_footprint(): + self.update_footprint(granule) + + # image + if granule.needs_image(): + self.update_image(granule) + + # both bbox and footprint + if granule.has_footprint_and_bbox(): + with self.lock: + self.granules_with_footprint_and_bbox += 1 + + self.monthly_results[date]['both_footprint_and_bbox'] += 1 + + # dmrpp + if self.granule_options['dmrpp_processing'] == "force": + self.update_dmrpp(granule) + elif self.granule_options['dmrpp_processing'] == "on": + self.check_dmrpp(granule) + + with self.lock: + self.granules_analyzed += 1 + except Exception as exc: + self.logger.error(f"Error: {str(exc)}\n") + + def update_image(self, granule): + """Create and send messages for one granule's image update.""" + + with self.lock: + self.granules_needing_image += 1 + self.concept_ids_needing_image.append(granule.concept_id()) + + date = granule.start_date()[:7] + self.monthly_results[date]['needs_image'] += 1 + if granule.s3_bucket_info(): + if not self.message_limit_reached(): + with self.lock: + self.image_messages_sent += 1 + message = self.message_writer.write(granule, needs_footprint=False, + needs_image=True, needs_dmrpp=False, + skip_cmr_opendap_update=True) + for sender in self.message_senders: + sender.send(message) + else: + with self.lock: + self.images_that_couldnt_be_processed += 1 + raise Exception( + f"Could not process image for granule {granule.native_id()} because of missing 
S3 " + f"bucket info") + + def update_footprint(self, granule): + """Create and send messages for one granule's footprint update.""" + + with self.lock: + self.granules_needing_footprint += 1 + self.concept_ids_needing_footprint.append(granule.concept_id()) + + date = granule.start_date()[:7] + self.monthly_results[date]['needs_footprint'] += 1 + if granule.s3_bucket_info(): + if not self.message_limit_reached(): + with self.lock: + self.footprint_messages_sent += 1 + message = self.message_writer.write(granule, needs_footprint=True, + needs_image=False, needs_dmrpp=False, + skip_cmr_opendap_update=True) + for sender in self.message_senders: + sender.send(message) + else: + with self.lock: + self.footprints_that_couldnt_be_processed += 1 + raise Exception( + f"Could not process footprint for granule {granule.native_id()} because of " + f"missing S3 bucket info") + + def check_dmrpp(self, granule): + """Check if dmrpp needs updating based on the dmrpp file state, and update if so.""" + + s3_bucket_info = granule.s3_bucket_info() + if s3_bucket_info: + dmrpp_state = granule.get_dmrpp_state(f's3://{s3_bucket_info["bucket"]}' + f'/{s3_bucket_info["key"]}.dmrpp') + if dmrpp_state == DmrppState.OLDER_VERSION: + self.update_dmrpp(granule) + with self.lock: + self.dmrpp_older_version += 1 + elif dmrpp_state == DmrppState.MISSING_VERSION: + self.update_dmrpp(granule) + with self.lock: + self.dmrpp_missing_version += 1 + elif dmrpp_state == DmrppState.MATCHED_VERSION: + with self.lock: + self.dmrpp_unprocessed += 1 + elif dmrpp_state == DmrppState.NEWER_VERSION: + with self.lock: + self.dmrpp_newer_version += 1 + else: + with self.lock: + self.dmrpp_that_couldnt_be_processed += 1 + raise Exception( + f"Could not process dmrpp for granule {granule.native_id()} because of " + f"missing S3 bucket info") + + def update_dmrpp(self, granule): + """Create and send messages for one granule's dmrpp update.""" + + with self.lock: + self.granules_needing_dmrpp += 1 + self.concept_ids_needing_dmrpp.append(granule.concept_id()) + + date = granule.start_date()[:7] + self.monthly_results[date]['needs_dmrpp'] += 1 + if granule.s3_bucket_info(): + with self.lock: + skip_cmr_opendap_update = granule.has_opendap_url() + if not skip_cmr_opendap_update: + self.dmrpp_update_cmr_opendap += 1 + if not self.message_limit_reached(): + with self.lock: + self.dmrpp_messages_sent += 1 + message = self.message_writer.write(granule, needs_footprint=False, + needs_image=False, needs_dmrpp=True, + skip_cmr_opendap_update=skip_cmr_opendap_update) + for sender in self.message_senders: + sender.send(message) + else: + with self.lock: + self.dmrpp_that_couldnt_be_processed += 1 + raise Exception( + f"Could not process dmrpp for granule {granule.native_id()} because of missing S3 " + f"bucket info") + + def log_stats(self): + """Log info about backfilling process""" + self.logger.info( + "\n==============================================================\n" + f"Execution id: {self.cli_execution_id}\n" + f"Matching granules: {self.search.total_matching_granules()}\n" + f"Granules analyzed: {self.granules_analyzed}\n" + f" in time range: {self.granule_range_start or '-'} to {self.granule_range_end or '-'}\n\n" + + f"{self.granules_needing_footprint} granules need footprints\n" + f"{self.footprints_that_couldnt_be_processed} footprints couldn't be processed because of missing s3 info\n" + f"{self.footprint_messages_sent} footprint messages were sent\n\n" + + f"{self.granules_needing_image} granules need images\n" + 
f"{self.images_that_couldnt_be_processed} images couldn't be processed because of missing s3 info\n" + f"{self.image_messages_sent} image messages were sent\n\n" + + f"{self.granules_with_footprint_and_bbox} granules with both footprint and bbox\n" + ) + if self.granule_options['dmrpp_processing'] == "on" or self.granule_options['dmrpp_processing'] == "force": + self.logger.info( + f"{self.granules_needing_dmrpp} granules need dmrpp\n" + f"{self.dmrpp_that_couldnt_be_processed} dmrpp couldn't be processed because of missing s3 info\n" + f"{self.dmrpp_messages_sent} dmrpp messages were sent\n" + ) + if self.granule_options['dmrpp_processing'] == "on": + self.logger.info( + f" dmrpp details:\n" + f" {self.dmrpp_unprocessed} unprocessed\n" + f" {self.dmrpp_newer_version} with newer version\n" + f" {self.dmrpp_missing_version} missing version\n" + f" {self.dmrpp_older_version} with older version\n" + f" {self.dmrpp_update_cmr_opendap} missing cmr opendap url\n" + ) + self.logger.info( + f"-- {self.destination_message} --\n" + "==============================================================\n" + ) + if len(self.concept_ids_needing_image) > 0: + self.logger.info(f"Granule IDs needing images (showing first 100):\n" + f" {self.concept_ids_needing_image[:100]}\n" + ) + if len(self.concept_ids_needing_footprint) > 0: + self.logger.info(f"Granule IDs needing footprints (showing first 100):\n" + f" {self.concept_ids_needing_footprint[:100]}\n" + ) + if len(self.concept_ids_needing_dmrpp) > 0: + self.logger.info(f"Granule IDs needing dmrpp (showing first 100):\n" + f" {self.concept_ids_needing_dmrpp[:100]}\n" + ) + self.print_monthly_results_table() + + def message_limit_reached(self): + """Returns True if there is a message limit and it has been reached, otherwise False""" + if self.message_limit is None: + return False + return self.footprint_messages_sent + self.image_messages_sent + self.dmrpp_messages_sent >= self.message_limit + + def get_forge_tig_configuration(self): + """Function to get forge tig configuration of a collection""" + + config_url = "https://hitide.podaac.earthdatacloud.nasa.gov/dataset-configs/" + collection_url = f"{config_url}{self.collection}.cfg" + result = requests.get(collection_url, timeout=120) + if result.status_code == 200: + self.forge_tig_configuration = json.loads(result.content) + else: + self.forge_tig_configuration = None + + +def main(args=None): + """Main script for backfilling from the cli""" + + # Disable pylint broad-except - So that a user friendly message can be displayed. Only used at top level + # Disable pylint bare-except - So that after ctrl-C, a final status message can be logged. 
Only used at top level
+    # pylint: disable=broad-except,bare-except
+
+    # load args
+    args = parse_args(args)
+    args.cli_execution_id = str(uuid.uuid4())
+
+    # setup dependencies
+    logger = None
+    try:
+        logger = logger_from_args(args)
+
+        logger.info(f"Started backfill: "  # pylint: disable=W1203
+                    f"{datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S %Z')}")
+
+        safe_log_args(logger, args)
+        search = granule_search_from_args(args, logger)
+        message_writer = message_writer_from_args(args, logger)
+        message_senders = message_senders_from_args(args, logger)
+        granule_options = granule_options_from_args(args)
+        s3 = S3Reader(logger, args.aws_profile)  # pylint: disable=C0103
+        collection = args.collection
+    except Exception as exc:
+        # logger may not exist yet if logger_from_args itself failed
+        if logger:
+            logger.error(f"Error: {str(exc)}\n")
+        else:
+            print(f"Error: {str(exc)}")
+        return
+
+    # setup backfiller
+    backfiller = Backfiller(search, message_writer, message_senders,
+                            granule_options, logger, args.message_limit, args.cli_execution_id, s3, collection)
+
+    # Check forge configurations before running backfill
+    backfiller.get_forge_tig_configuration()
+
+    if granule_options['footprint_processing'] != "off":
+        if backfiller.forge_tig_configuration is None:
+            raise Exception("There are no footprint settings for this collection, please disable footprint for backfilling")
+        footprint_settings = backfiller.forge_tig_configuration.get('footprint')
+        if not footprint_settings:
+            raise Exception("There are no footprint settings for this collection, please disable footprint for backfilling")
+
+    if granule_options['dmrpp_processing'] != "off":
+        files = message_writer.collection_config.get('files', [])
+        has_dmrpp_regex = False
+        for file in files:
+            if file.get('regex', "").endswith(".dmrpp$"):
+                has_dmrpp_regex = True
+                break
+        if has_dmrpp_regex is False:
+            raise Exception(f"There is no DMRPP regex in cumulus collection configuration for {message_writer.collection_name}")
+
+    # run backfiller
+    try:
+        backfiller.process_granules()
+    except Exception as exc:
+        logger.error(exc)
+    except:  # noqa: E722 - to catch ctrl-C
+        logger.warning("keyboard interrupt")
+
+    # close things up
+    for message_sender in message_senders:
+        message_sender.close()
+
+    backfiller.log_stats()
+
+    logger.info(f"Finished backfill: {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S %Z')}")  # pylint: disable=W1203
+
+
+if __name__ == "__main__":
+    main()
diff --git a/podaac/hitide_backfill_tool/cmr/__init__.py b/podaac/hitide_backfill_tool/cmr/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/podaac/hitide_backfill_tool/cmr/cmr_granule.py b/podaac/hitide_backfill_tool/cmr/cmr_granule.py
new file mode 100644
index 0000000..141a7e2
--- /dev/null
+++ b/podaac/hitide_backfill_tool/cmr/cmr_granule.py
@@ -0,0 +1,234 @@
+"""Extract information from CMR umm_json formatted granule metadata."""
+from urllib.parse import urlparse
+from podaac.hitide_backfill_tool.dmrpp_utils import get_dmrpp_version, parse_version, DmrppState
+from podaac.hitide_backfill_tool.args import default_config
+
+
+class CmrGranule:
+    """Extracts information from CMR umm_json formatted granule metadata."""
+
+    # Disable broad-except since many types of error indicate the absence
+    # of data when attempting access (e.g. TypeError, IndexError, KeyError, ...)
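As a usage illustration (not part of this patch), the accessors in this class expect UMM-G shaped records like the following; the record and its values are invented:

```python
from podaac.hitide_backfill_tool.cmr.cmr_granule import CmrGranule

# Invented, minimal UMM-G style record: an s3 link but no GPolygons/Lines
# footprint geometry, so footprint generation would be needed.
umm_granule = {
    "meta": {"native-id": "granule-1", "concept-id": "G1234567890-POCLOUD"},
    "umm": {
        "SpatialExtent": {"HorizontalSpatialDomain": {"Geometry": {
            "BoundingRectangles": [{"WestBoundingCoordinate": -180.0}]}}},
        "RelatedUrls": [{"Type": "GET DATA VIA DIRECT ACCESS",
                         "URL": "s3://my-bucket/L2P/granule-1.nc"}],
    },
}

granule = CmrGranule(umm_granule)
print(granule.needs_footprint())  # True (no GPolygons/Lines geometry)
print(granule.s3_bucket_info())   # {'bucket': 'my-bucket', 'key': 'L2P/granule-1.nc', 'filename': 'granule-1.nc'}
```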
+ # pylint: disable=broad-except + + # pylint: disable=too-many-instance-attributes + # pylint: disable-next=too-many-arguments + def __init__(self, + umm_granule, + s3=None, + footprint_geometries=None, + can_use_data_url_for_s3_bucket_info=False, + image_processing="on", + footprint_processing="on", + dmrpp_processing="off", + dmrpp_min_version=parse_version(default_config["dmrpp_min_version"])): + """Create the CmrGranule object from granule and settings.""" + # pylint: disable=C0103 + + self.umm_granule = umm_granule + self.s3 = s3 + self.footprint_geometries = footprint_geometries or ["GPolygons", "Lines"] + self.can_use_data_url_for_s3_bucket_info = can_use_data_url_for_s3_bucket_info + self.image_processing = image_processing + self.footprint_processing = footprint_processing + self.dmrpp_processing = dmrpp_processing + self.dmrpp_min_version = dmrpp_min_version + + def has_footprint(self): + """Returns True if granule has footprint, otherwise False.""" + try: + granule_geometries = self.umm_granule["umm"]["SpatialExtent"]["HorizontalSpatialDomain"]["Geometry"] # pylint: disable=line-too-long + for geometry_name in granule_geometries: + geometry = granule_geometries[geometry_name] + if geometry_name in self.footprint_geometries and isinstance(geometry, list) and len(geometry) > 0: # pylint: disable=line-too-long + return True + except Exception: + pass + return False + + def has_footprint_and_bbox(self): + """Returns True if granule has footprint and bounding rectangle, otherwise False.""" + try: + granule_geometries = self.umm_granule["umm"]["SpatialExtent"]["HorizontalSpatialDomain"]["Geometry"] # pylint: disable=line-too-long + if 'BoundingRectangles' in granule_geometries: + for geometry_name in granule_geometries: + geometry = granule_geometries[geometry_name] + if geometry_name in self.footprint_geometries and isinstance(geometry, list) and len(geometry) > 0: # pylint: disable=line-too-long + return True + except Exception: + pass + return False + + def needs_footprint(self): + """Returns True if granule needs to have a footprint generated, otherwise False.""" + + return self.footprint_processing == "force" or (self.footprint_processing == "on" and not self.has_footprint()) # pylint: disable=line-too-long + + def has_image(self): + """Returns True if the granule has a thumbnail image link, otherwise False.""" + + try: + urls = self.umm_granule["umm"]["RelatedUrls"] + for url in urls: + if url["Type"] == "GET RELATED VISUALIZATION": + return True + except Exception: + pass + return False + + def has_opendap_url(self): + """Returns True if the granule has an OpenDAP URL link, otherwise False.""" + + try: + url = self.opendap_url() + + if url: + return True + except Exception: + pass + + return False + + def get_dmrpp_state(self, s3_dmrpp_url): + """Returns DmrppState of the granule's dmrpp file. It downloads the dmrpp file from the + S3 bucket and checks the version against the dmrpp_min_version. 
Returns one of four + different possible states.""" + + state = DmrppState.MISSING_VERSION + + try: + dmrpp_version = get_dmrpp_version(self.s3, s3_dmrpp_url) + + if dmrpp_version != "": + version = parse_version(dmrpp_version) + if version < self.dmrpp_min_version: + state = DmrppState.OLDER_VERSION + elif version > self.dmrpp_min_version: + state = DmrppState.NEWER_VERSION + else: + state = DmrppState.MATCHED_VERSION + except Exception: + pass + return state + + def needs_image(self): + """Returns True if the granule needs to have thumbnail images generated, otherwise False.""" + + return self.image_processing == "force" or (self.image_processing == "on" and not self.has_image()) # pylint: disable=line-too-long + + def native_id(self): + """Returns the native_id.""" + + return self.umm_granule["meta"]["native-id"] + + def concept_id(self): + """Returns the concept_id.""" + + return self.umm_granule["meta"]["concept-id"] + + def s3_url(self): + """Returns a link to the granule in S3 if provided, otherwise returns None.""" + + try: + urls = self.umm_granule["umm"]["RelatedUrls"] + for url in urls: + if url["Type"] == "GET DATA VIA DIRECT ACCESS" and "s3://" in url["URL"]: + return url["URL"] + except Exception: + pass + return None + + def opendap_url(self): + """Returns an OpenDAP link to the granule if provided, otherwise returns None.""" + + try: + urls = self.umm_granule["umm"]["RelatedUrls"] + for url in urls: + if "Subtype" in url and url["Subtype"] == "OPENDAP DATA" and "opendap" in \ + url["URL"]: + return url["URL"] + except Exception: + pass + return None + + def start_date(self): + """Returns the start_date if provided, otherwise returns None.""" + + try: + return self.umm_granule["umm"]["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"] + except Exception: + return None + + def end_date(self): + """Returns the end_date if provided, otherwise returns None.""" + + try: + return self.umm_granule["umm"]["TemporalExtent"]["RangeDateTime"]["EndingDateTime"] + except Exception: + return None + + def raw(self): + """Returns the raw umm_json formatted granule metadata.""" + + return self.umm_granule + + def data_url(self): + """Returns the http link to the granule file if provided, otherwise returns None.""" + + try: + urls = self.umm_granule["umm"]["RelatedUrls"] + for url in urls: + if url["Type"] == "GET DATA" and "https://" in url["URL"]: + return url["URL"] + except Exception: + pass + return None + + def size(self, filename): + """Returns the size of the data granule""" + umm = self.umm_granule.get('umm', {}) + data_granule = umm.get('DataGranule', {}) + files = data_granule.get('ArchiveAndDistributionInformation', []) + file = next((file for file in files if file.get('Name') == filename), None) + return file.get('SizeInBytes', 0) if file else 0 + + def s3_bucket_info(self): + """Returns the S3 bucket name, key, and the filename for the granule. + + Example return value for granule with s3://bucket-name/directory1/directory2/filename.nc + { + "bucket": "bucket-name", + "key": "directory1/directory2/filename.nc", + "filename": "filename.nc" + } + + This information will come from the S3 bucket link if provided, or from + the data url if 'can_use_data_url_for_s3_bucket_info' == True. + Otherwise returns None. 
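To make the docstring above concrete, here are the two parsing paths run on example URLs (a sketch, not part of this patch; the URLs are invented):

```python
from urllib.parse import urlparse

# Path 1: direct s3 link.
s3_url = "s3://bucket-name/directory1/directory2/filename.nc"
parsed = urlparse(s3_url, allow_fragments=False)
print(parsed.netloc, parsed.path.lstrip('/'))
# bucket-name directory1/directory2/filename.nc

# Path 2: https data url, bucket assumed to be the first path segment.
data_url = "https://hostname.com/bucket-name/directory1/directory2/filename.nc"
path = data_url.partition("//")[2].partition('/')[2]
bucket, _, key = path.partition('/')
print(bucket, key, key.rpartition('/')[2])
# bucket-name directory1/directory2/filename.nc filename.nc
```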
+ """ + try: + if self.s3_url(): + # Assume s3 url with structure -> + # s3://bucket-name/directory1[/directory2]/filename.nc + parsed = urlparse(self.s3_url(), allow_fragments=False) + return { + "bucket": parsed.netloc, + "key": parsed.path.lstrip('/'), + "filename": parsed.path.split('/')[-1] + } + + if self.can_use_data_url_for_s3_bucket_info and self.data_url(): + # Assume data url with structure -> + # https://hostname.com/bucket-name/directory1[/directory2]/filename.nc + path = self.data_url().partition("//")[2].partition('/')[2] + bucket, _, key = path.partition('/') + filename = key.rpartition('/')[2] + + return { + "bucket": bucket, + "key": key, + "filename": filename + } + except Exception: + pass + return None diff --git a/podaac/hitide_backfill_tool/cmr/helpers.py b/podaac/hitide_backfill_tool/cmr/helpers.py new file mode 100644 index 0000000..29ab22f --- /dev/null +++ b/podaac/hitide_backfill_tool/cmr/helpers.py @@ -0,0 +1,13 @@ +"""Helper functions for dealing with CMR search""" + + +def cmr_base_url(env): + """Given an environment name, return the CMR base url""" + + if env == "ops": + return "https://cmr.earthdata.nasa.gov" + if env == "uat": + return "https://cmr.uat.earthdata.nasa.gov" + if env == "sit": + return "https://cmr.sit.earthdata.nasa.gov" + raise ValueError("Improper environment specified: " + str(env)) diff --git a/podaac/hitide_backfill_tool/cmr/search.py b/podaac/hitide_backfill_tool/cmr/search.py new file mode 100644 index 0000000..de5fd18 --- /dev/null +++ b/podaac/hitide_backfill_tool/cmr/search.py @@ -0,0 +1,210 @@ +"""Search for CMR granules""" + +import ast +import json +import logging +from requests import Session +from requests.exceptions import RequestException +from requests.adapters import HTTPAdapter +from urllib3.util.retry import Retry +from .cmr_granule import CmrGranule + + +class GranuleSearch: + """Searches for CMR granules, with paging""" + + # pylint: disable=too-many-instance-attributes,too-many-arguments,too-many-locals + + def __init__(self, + base_url, + collection_short_name, + start_date=None, + end_date=None, + provider='pocloud', + edl_token=None, + launchpad_token=None, + page_size=2000, + page_limit=5, + logger=logging, + cmr_search_after=None, + cycles=None, + sort_order="ascending"): + """Create GranuleSearch object""" + + self._base_url = base_url + self._collection_short_name = collection_short_name + self._start_date = start_date + self._end_date = end_date + self._provider = provider + self._edl_token = edl_token + self._launchpad_token = launchpad_token + self._page_size = page_size + self._page_limit = page_limit + self._cmr_search_after = cmr_search_after + self._granules = [] + self._total_matching_granules = 0 + self._pages_loaded = 0 + self._logger = logger + self.cycles = cycles + + if sort_order == "descending": + self.sort_order = "-start_date" + else: + self.sort_order = "start_date" + + if sort_order == "descending": + self.sort_order = "-start_date" + else: + self.sort_order = "start_date" + + retry = Retry(connect=10, backoff_factor=0.5) + adapter = HTTPAdapter(max_retries=retry) + + self.session = Session() + self.session.mount('http://', adapter) + self.session.mount('https://', adapter) + + def granule_generator(self): + """Return iterable that provides all matching granules across multipl pages""" + + while not self.is_done(): + self.get_next_page() + if len(self.granules()) == 0: + return "Problem reading granules" + for granule in self.granules(): + yield CmrGranule(granule) + return "Finished reading 
granules" + + def get_next_page(self): + # pylint: disable=too-many-branches + # pylint: disable=too-many-statements + """Retrieve the first or next page of granule search results and + return True if granules received, False otherwise + """ + + if self.is_done(): + self._logger.warning( + "-- trying to get_next_page after search is done --") + self._granules = [] + return False + + url = (f"{self._base_url}/search/granules.umm_json?provider={self._provider}" + f"&page_size={self._page_size}&sort_key[]={self.sort_order}") + url += f"&short_name={self._collection_short_name}" + url += _temporal_param(self._start_date, self._end_date) + + if self.cycles: + try: + cycle_list = ast.literal_eval(self.cycles) + cycles_output = "" + if isinstance(cycle_list, list): + for cycle in cycle_list: + cycles_output += f"cycle[]={cycle}&" + cycles_output = cycles_output[:-1] + else: + cycles_output = f"cycle[]={cycle_list}" + except (ValueError, SyntaxError): + print("Invalid input. Please provide a valid list or a single number.") + + url += f"&{cycles_output}" + + headers = {} + if self._cmr_search_after: + headers["cmr-search-after"] = self._cmr_search_after + if self._edl_token: + headers["Authorization"] = f"Bearer {self._edl_token}" + elif self._launchpad_token: + headers["Authorization"] = self._launchpad_token + + if self._pages_loaded == 0: + print("\nRequesting first CMR page...", end='', flush=True) + else: + print("Requesting next CMR page...", end='', flush=True) + + body = {} + try: + response = self.session.get(url, headers=headers) + response.raise_for_status() + + body = json.loads(response.text) + except RequestException as exc: + self._logger.error(f"Error requesting CMR: {exc}") + except json.JSONDecodeError as exc: + self._logger.error(f"Error decoding CMR JSON response: {exc}") + + # Error message if there is a problem + if response.status_code >= 400 or body.get("hits") is None or body.get("items") is None: + last_granule_message = "No granules received" + if len(self._granules) > 0: + last_granule = CmrGranule(self._granules[-1]) + last_granule_message = (f"{last_granule.concept_id()} {last_granule.native_id()}" + f" {last_granule.start_date()} {last_granule.end_date()}") + self._logger.error( + f"\nCMR problem:\n" + f"url: {url}\n" + f"cmr-search-after: {headers.get('cmr-search-after')}\n" + f"----------\n" + f"http_code: {response.status_code}\n" + f"body: {response.text}\n" + f"----------\n" + f"last_granule: {last_granule_message}\n" + ) + raise Exception("CMR error") + + # Update to latest page received + self._cmr_search_after = response.headers.get("cmr-search-after") + self._total_matching_granules = body["hits"] + self._granules = body["items"] + self._pages_loaded += 1 + + self._logger.info( + f"\nCMR PAGE LOAD # {self.pages_loaded()}:\n" + f"url: {url}\n" + f"cmr-search-after: {headers.get('cmr-search-after')}\n" + f"---------\n" + f"http_code: {response.status_code}\n" + f"hits: {body.get('hits')}\n" + f"granules in page: {len(self.granules())}\n" + ) + + return bool(self.granules) + + def is_done(self): + """Return True if all pages have been retrieved, or if page_limit reached""" + + return ( + # reached page limit + (self._page_limit and self._pages_loaded >= self._page_limit) or + # all pages have been loaded + (not self._cmr_search_after and self._pages_loaded > 0) + ) + + def granules(self): + """Return the most recently loaded page of granules""" + return self._granules + + def total_matching_granules(self): + """Return the total number of granules that match the 
search criteria (across all pages)""" + + return self._total_matching_granules + + def pages_loaded(self): + """Return the total number of granule search pages that have been loaded up to this point""" + return self._pages_loaded + +# +# Helpers +# + + +def _temporal_param(start_date, end_date): + """Convert start/end dates to formatted temporal url param for granule search""" + if not start_date and not end_date: + return "" + param = "&temporal=" + if start_date: + param += start_date + param += "," + if end_date: + param += end_date + return param diff --git a/podaac/hitide_backfill_tool/cnm_message_writer.py b/podaac/hitide_backfill_tool/cnm_message_writer.py new file mode 100644 index 0000000..30aa101 --- /dev/null +++ b/podaac/hitide_backfill_tool/cnm_message_writer.py @@ -0,0 +1,93 @@ +"""Create Cumulus CNM message from granule.""" + +import json + + +class CnmMessageWriter: + """Creates a Cumulus CNM message from granule.""" + + # pylint: disable=too-many-instance-attributes,too-many-arguments + + def __init__(self, + message_config, + collection_config, + search_start, + search_end, + provider, + cli_execution_id, + user): + """Create the CnmMessageWriter.""" + + self.collection_name = collection_config["name"] + self.collection_version = collection_config["version"] + self.collection_config = collection_config + self.provider = provider + self.search_start = search_start + self.search_end = search_end + self.cli_execution_id = cli_execution_id + self.user = user + self.create_template(message_config, collection_config) + + def create_template(self, message_config, collection_config): + """Create a message template from message config file and from a + cumulus collection config file. + """ + + collection_name = collection_config["name"] + + meta = dict(message_config) + meta["collection"] = collection_config + meta["collection"]["dataType"] = collection_name + meta["collection"]["files"].append({ + "bucket": "ia_public", + "regex": "^.*\\.png$", + "type": "metadata" + }) + + self.template = { + "cumulus_meta": { + "system_bucket": message_config["buckets"]["internal"]["name"] + }, + "meta": meta, + } + + def write(self, granule, needs_footprint, needs_image, needs_dmrpp, skip_cmr_opendap_update): + """Return a CNM message string given granule information.""" + + message = dict(self.template) + + s3_info = granule.s3_bucket_info() + message["payload"] = { + "granules": [{ + "cmrConceptId": granule.concept_id(), + "granuleId": granule.native_id(), + "dataType": self.collection_name, + "files": [{ + "bucket": s3_info["bucket"], + "key": s3_info["key"], + "fileName": s3_info["filename"], + "type": "data", + "size": granule.size(s3_info["filename"]) + }] + }] + } + + message["forge"] = needs_footprint + message["tig"] = needs_image + message["dmrpp"] = needs_dmrpp + message["skip_cmr_opendap_update"] = skip_cmr_opendap_update + + message["cli_params"] = { + "uuid": self.cli_execution_id, + "collection_short_name": self.collection_name, + "collection_version": self.collection_version, + "provider": self.provider, + "cmr_search_start": self.search_start, + "cmr_search_end": self.search_end, + "granule_start": granule.start_date(), + "granule_end": granule.end_date(), + "username": self.user + } + + print(message["payload"]) + return json.dumps(message) diff --git a/podaac/hitide_backfill_tool/config.py b/podaac/hitide_backfill_tool/config.py new file mode 100644 index 0000000..deb7f1d --- /dev/null +++ b/podaac/hitide_backfill_tool/config.py @@ -0,0 +1,59 @@ +""" +This module contains 
methods for retrieving configurations that
+are used for creating cumulus messages
+"""
+
+import os
+from pathlib import Path
+
+from .file_util import load_json_file, make_absolute
+
+
+def get_message_config(env, config_file=None):
+    """Retrieve message config file.
+
+    The message config is specific to an environment (ops, uat, or sit) and
+    is the same for all collections in that environment
+    """
+    if config_file is not None:
+        message_config = load_json_file(config_file)
+    else:
+        message_config = load_json_file(
+            "default_message_config.json", relative_to=__file__)
+    return message_config.get(env, {})
+
+
+def get_collection_config(base_dir, collection, env, logger):
+    """Retrieve a collection config
+
+    A collection config is specific to a collection in a certain
+    environment (e.g. MODIS_A-JPL-L2P-v2019.0 in uat)
+
+    base_dir is the root directory for the cumulus-configurations
+    repository
+    """
+    base_dir = make_absolute(base_dir)
+
+    env_dir = ""
+    if "sit" in env:
+        env_dir = "sit"
+    elif "uat" in env:
+        env_dir = "uat"
+    elif "ops" in env:
+        env_dir = "ops"
+
+    pattern = f"**/{env_dir}/**/{collection}.json"
+    logger.info(f"collection config: searching {base_dir} for {pattern}")
+
+    if not os.path.isdir(base_dir):
+        raise Exception(
+            f"Tried to find cumulus-configurations directory: {base_dir}. It is not a directory")
+
+    path = Path(base_dir)
+    files = path.glob(pattern)
+    try:
+        file = next(files)
+        logger.info(f"collection config: found {file}")
+        return load_json_file(file)
+    except Exception as exc:
+        raise Exception("Could not find collection config") from exc
diff --git a/podaac/hitide_backfill_tool/config_creator.py b/podaac/hitide_backfill_tool/config_creator.py
new file mode 100644
index 0000000..c4978c1
--- /dev/null
+++ b/podaac/hitide_backfill_tool/config_creator.py
@@ -0,0 +1,114 @@
+"""
+This module is for creating a config file that can be used by the backfill tool
+"""
+
+import argparse
+from .file_util import make_absolute
+
+
+CONFIG_TEMPLATE = """
+### CMR SEARCH OPTIONS ###
+cmr: <<cmr>>  # required
+collection: MODIS_A-JPL-L2P-v2019.0  # required
+provider: <<provider>>  # required
+# start_date: "2020-01-01T00:00:00Z"
+# end_date: "2020-02-01T00:00:00Z"
+# page_size: 2000
+# page_limit: 2
+# edl_token: insert-edl-token-here
+# launchpad_token: insert-launchpad-token-here
+
+### GRANULE PARSING OPTIONS ###
+# geometries: [GPolygons, Lines]
+# footprint: "on"
+# image: "on"
+# dmrpp: "on"
+# dmrpp_min_version: "3.20.9-92"
+use_data_url: true
+
+### FOR FINDING MESSAGE INFORMATION ###
+cumulus: <<cumulus>>  # required
+cumulus_configurations: insert-cumulus-configurations-directory-location-here  # required
+
+### MESSAGE SENDING OPTIONS ###
+# sns_arn: <<sns_arn>>
+# aws_profile: <<aws_profile>>
+# message_file: /path/to/message/file.txt
+# message_limit: 20
+# user: insert-optional-username-here
+# preview: true
+
+### LOGS ###
+# log_file: /path/to/desired/log/file.txt
+# log_level: DEBUG
+
+"""
+
+
+def create_config_string(cmr, provider, cumulus, aws_profile):
+    """Create a yaml formatted config string with params filled in"""
+    txt = CONFIG_TEMPLATE
+    txt = txt.replace("<<cmr>>", cmr)
+    txt = txt.replace("<<provider>>", provider)
+    txt = txt.replace("<<cumulus>>", cumulus)
+    txt = txt.replace("<<aws_profile>>", aws_profile)
+    return txt
+
+
+def format_arg_name(name):
+    """Removes leading "--" and converts "-" to "_"
+
+    Ex// --optional-value -> optional_value
+    """
+    return name.replace("--", "").replace("-", "_")
+
+
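The <<name>> tokens above are a reconstruction: the placeholder names arrived garbled in this copy, so they are inferred here from the parameters of create_config_string. A quick sketch of the substitution (not part of this patch):

```python
# Illustrative only; the real template is CONFIG_TEMPLATE above.
template = "cmr: <<cmr>>\nprovider: <<provider>>\ncumulus: <<cumulus>>"
filled = (template.replace("<<cmr>>", "ops")
                  .replace("<<provider>>", "pocloud")
                  .replace("<<cumulus>>", "ops"))
print(filled)
# cmr: ops
# provider: pocloud
# cumulus: ops
```

+def parse_args(args):
+    """Parse arguments given from command line"""
+    parser = 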
argparse.ArgumentParser()
+    parser.add_argument("--env", required=True, choices=["ops", "uat", "sit"])
+    parser.add_argument("--filename", required=True)
+    return parser.parse_args(args)
+
+
+def create_defaults(env):
+    """Given a general environment name, generate default values for cmr,
+    provider, cumulus, sns_arn, and aws_profile
+    """
+    if env == "ops":
+        return {
+            "cmr": "ops",
+            "provider": "pocloud",
+            "cumulus": "ops",
+            "aws_profile": "ngap-services-ops"
+        }
+
+    if env == "uat":
+        return {
+            "cmr": "uat",
+            "provider": "pocloud",
+            "cumulus": "uat",
+            "aws_profile": "ngap-services-uat"
+        }
+
+    if env == "sit":
+        return {
+            "cmr": "uat",
+            "provider": "pocumulus",
+            "cumulus": "sit",
+            "aws_profile": "ngap-services-sit"
+        }
+
+    raise ValueError(f"create_defaults({env}) - env must be ops | uat | sit")
+
+
+def create_config(args=None):
+    """Create a config file given command line arguments"""
+    args = parse_args(args)
+    print(args)
+
+    defaults = create_defaults(args.env)
+    config = create_config_string(**defaults)
+    filename = make_absolute(args.filename)
+    with open(filename, "w", encoding='utf-8') as file:
+        file.write(config)
diff --git a/podaac/hitide_backfill_tool/default_message_config.json b/podaac/hitide_backfill_tool/default_message_config.json
new file mode 100644
index 0000000..8adf0d3
--- /dev/null
+++ b/podaac/hitide_backfill_tool/default_message_config.json
@@ -0,0 +1,27 @@
+{
+    "sit": {
+        "buckets": {
+            "internal": {
+                "name": "internal_bucket",
+                "type": "internal"
+            }
+        },
+        "stack": "sit",
+        "cmr": {
+            "clientId": "clientId",
+            "cmrEnvironment": "cmrEnvironment",
+            "cmrLimit": 100,
+            "cmrPageSize": 50,
+            "oauthProvider": "oauthProvider",
+            "passwordSecretName": "passwordSecretName",
+            "provider": "provider",
+            "username": "username"
+        },
+        "distribution_endpoint": "https://example.com/",
+        "launchpad": {
+            "api": "https://launchpadapi.com",
+            "certificate": "certificate.pfx",
+            "passphraseSecretName": "passphraseSecretName"
+        }
+    }
+}
\ No newline at end of file
diff --git a/podaac/hitide_backfill_tool/dmrpp_utils.py b/podaac/hitide_backfill_tool/dmrpp_utils.py
new file mode 100644
index 0000000..f1d84f4
--- /dev/null
+++ b/podaac/hitide_backfill_tool/dmrpp_utils.py
@@ -0,0 +1,45 @@
+"""Static functions to read dmrpp files, parse version from file, and determine dmrpp state."""
+
+import xml.etree.ElementTree as element_tree
+from enum import Enum
+
+
+class DmrppState(Enum):
+    """Represents a granule's dmrpp file state/status."""
+
+    OLDER_VERSION = 1    # Will update
+    MISSING_VERSION = 2  # Will update
+    MATCHED_VERSION = 3  # Don't update
+    NEWER_VERSION = 4    # Don't update
+
+
+def parse_version(version_string):
+    """Parses the input version_string and returns it as a tuple in the format (X, X, X, X)."""
+
+    try:
+        version, build = version_string.split("-")
+        version = [int(v) for v in version.split(".")]
+        build = int(build)
+        return tuple(version + [build])
+    except Exception as exc:
+        raise Exception(f"Could not parse version string {version_string}") from exc
+
+
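A small sketch (not part of this patch) of why the build number is folded into the version tuple: Python's plain tuple comparison then yields exactly the ordering the DmrppState checks need.

```python
def to_tuple(version_string):
    """Same shape parse_version returns: (major, minor, patch, build)."""
    version, build = version_string.split("-")
    return tuple(int(v) for v in version.split(".")) + (int(build),)

minimum = to_tuple("3.21.0-272")          # the dmrpp_min_version default
print(to_tuple("3.20.9-92") < minimum)    # True  -> OLDER_VERSION, regenerate
print(to_tuple("3.21.0-272") == minimum)  # True  -> MATCHED_VERSION, skip
print(to_tuple("3.21.0-300") > minimum)   # True  -> NEWER_VERSION, skip
```

+def get_dmrpp_version(s3, s3_dmrpp_url):
+    """Returns the version string from the dmrpp file. 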
If file or version not found,
+    returns "" (empty string)."""
+    # pylint: disable=C0103
+
+    version = ""
+
+    try:
+        xml = s3.read_file_from_s3(s3_dmrpp_url)
+        root = element_tree.fromstring(xml)
+
+        for attr_name, attr_value in root.items():
+            if attr_name.endswith("version"):
+                version = attr_value
+                break
+    except Exception:  # pylint: disable=W0703
+        pass
+    return version
diff --git a/podaac/hitide_backfill_tool/failed_workflow.py b/podaac/hitide_backfill_tool/failed_workflow.py
new file mode 100644
index 0000000..fbbf64b
--- /dev/null
+++ b/podaac/hitide_backfill_tool/failed_workflow.py
@@ -0,0 +1,129 @@
+"""Script to get unique errors from step function workflows"""
+# pylint: disable=C0103, C0301
+
+import argparse
+import json
+import boto3
+from fuzzywuzzy import fuzz
+
+
+def parse_args():
+    """function to get argument parameters"""
+
+    parser = argparse.ArgumentParser(description='Find unique errors in failed step function executions.')
+
+    parser.add_argument('--workflow_arn', help='aws workflow arn', required=True)
+    parser.add_argument('--profile_name', help='aws profile name', required=True)
+    parser.add_argument('--limit', help='maximum number of failed executions to scan', type=int, required=False)
+
+    return parser.parse_args()
+
+
+if __name__ == "__main__":
+
+    args = parse_args()
+    workflow_arn = args.workflow_arn
+    profile_name = args.profile_name
+    limit = args.limit
+    processed = 0
+
+    session = boto3.Session(profile_name=profile_name)
+    client = session.client('stepfunctions')
+
+    errors = []
+
+    max_results = 1000
+    if limit and limit <= 1000:
+        max_results = limit
+    elif limit and limit > 1000:
+        max_results = 1000
+
+    response = client.list_executions(
+        stateMachineArn=workflow_arn,
+        statusFilter='FAILED',
+        maxResults=max_results,
+    )
+
+    while True:
+
+        for a in response.get('executions'):
+            execution_arn = a.get('executionArn')
+
+            response_execution = client.get_execution_history(
+                executionArn=execution_arn
+            )
+
+            res = response_execution.get('events')[-2]
+            execution_input = res.get('stateEnteredEventDetails')['input']
+            execution_input_dict = json.loads(execution_input)
+            details = execution_input_dict.get('details')
+            exception = execution_input_dict.get('exception')
+            execution_name = execution_input_dict['cumulus_meta']['execution_name']
+
+            granule_id = execution_input_dict.get('payload').get('granules')[0].get('granuleId')
+            collection = execution_input_dict.get('payload').get('granules')[0].get('dataType')
+
+            if details:
+                error_string = details.get('errorMessage')
+            elif exception:
+                error_string = exception.get('Cause')
+            else:
+                print("Unprocessed ERROR MESSAGE")
+                print(json.loads(execution_input))
+                continue
+
+            # Record the error only if it is not a near-duplicate of one already seen
+            max_ratio = 0
+            for e in errors:
+                ratio = fuzz.partial_ratio(e['error'], error_string)
+                max_ratio = max(ratio, max_ratio)
+
+            if len(errors) == 0 or max_ratio < 50:
+                errors.append({'error': error_string, 'execution_name': execution_name, 'execution_arn': execution_arn, 'granule_id': granule_id, 'collection': collection})
+
+        processed += len(response.get('executions'))
+
+        next_token = response.get('nextToken')
+        if not next_token or (limit and processed >= limit):
+            break
+
+        if limit and processed < limit:
+            max_results = min(limit - processed, 1000)
+
+        response = client.list_executions(
+            stateMachineArn=workflow_arn,
+            statusFilter='FAILED',
+            maxResults=max_results,
+            nextToken=next_token
+        )
+
+    print("Number of unique errors found:", len(errors))
+    for e in errors:
+        print('Execution UUID : ', e.get('execution_name'))
+        print('Execution Arn : ', e.get('execution_arn'))
+        print('Collection :', e.get('collection'))
+        print('Granule Id :', e.get('granule_id'))
+        print('Error : ', e.get('error'))
+        print("#############################################################")
diff --git a/podaac/hitide_backfill_tool/file_util.py b/podaac/hitide_backfill_tool/file_util.py
new file mode 100644
index 0000000..0e01bd8
--- /dev/null
+++ b/podaac/hitide_backfill_tool/file_util.py
@@ -0,0 +1,42 @@
+"""Utility functions for working with files"""
+
+import json
+from os import getcwd
+from os.path import expanduser, expandvars, normpath, join, isabs, dirname
+import yaml
+
+
+def make_absolute(path, relative_to=None):
+    """Convert path to absolute path.
+
+    Does nothing if path is already an absolute path.
+    Expands environment variables and ~.
+    If path is relative, it will be resolved relative to the relative_to
+    param (if provided) or the cwd."""
+    expanded_path = expandvars(expanduser(path))
+    if isabs(expanded_path):
+        return expanded_path
+    cwd = dirname(relative_to) if relative_to else getcwd()
+    return normpath(join(cwd, path))
+
+
+def load_yaml_file(path, relative_to=None):
+    """Create dict from yaml file at location path"""
+    abs_path = make_absolute(path, relative_to)
+    with open(abs_path, encoding='utf-8') as stream:
+        return yaml.safe_load(stream)
+
+
+def write_yaml_file(path, content):
+    """Write dict (content) to yaml file at path"""
+    yaml_string = yaml.dump(content, default_flow_style=False)
+    abs_path = make_absolute(path)
+    with open(abs_path, 'w', encoding='utf-8') as stream:
+        stream.write(yaml_string)
+
+
+def load_json_file(path, relative_to=None):
+    """Create dict from json file at location path"""
+    abs_path = make_absolute(path, relative_to)
+    with open(abs_path, encoding='utf-8') as stream:
+        return json.loads(stream.read())
diff --git a/podaac/hitide_backfill_tool/memory_profiler.py b/podaac/hitide_backfill_tool/memory_profiler.py
new file mode 100644
index 0000000..e14bd59
--- /dev/null
+++ b/podaac/hitide_backfill_tool/memory_profiler.py
@@ -0,0 +1,230 @@
+# pylint: disable=redefined-outer-name, line-too-long, too-many-locals
+
+"""Script to profile lambda performance"""
+
+import json
+import re
+import time
+import statistics
+import csv
+import argparse
+from collections import defaultdict
+import boto3
+
+request_collection = {}
+memory_collection = defaultdict(list)
+billed_collection = defaultdict(list)
+
+
+def execute_query_with_pagination(query, start_time, end_time, client, log_group):
+    """Function to execute query with pagination"""
+
+    log_group_name = f'/aws/lambda/{log_group}'
+
+    response = client.start_query(
+        logGroupName=log_group_name,
+        startTime=start_time,
+        endTime=end_time,
+        queryString=query,
+        limit=10000
+    )
+
+    query_id = response['queryId']
+
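Looking back at failed_workflow.py above, its dedup rule can be shown in isolation: a new error is recorded only if it is less than 50% similar to every error seen so far (a sketch, not part of this patch; the error strings are invented).

```python
from fuzzywuzzy import fuzz

errors = ["Task timed out after 900.00 seconds"]
candidate = "Task timed out after 899.97 seconds"
if max(fuzz.partial_ratio(e, candidate) for e in errors) < 50:
    errors.append(candidate)
print(errors)  # unchanged: the near-duplicate timeout was suppressed
```

+    while True:
+        query_status = 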
client.get_query_results(queryId=query_id) + status = query_status['status'] + if status == 'Complete': + break + if status in ['Failed', 'Cancelled']: + print("Query execution failed or was cancelled.") + break + time.sleep(1) + + # Retrieve initial results + results = client.get_query_results(queryId=query_id) + data = results['results'] + return data + + +def execute_query_for_minute(query, minute_start_time, minute_end_time, client, log_group): + """Function to execute query for a given minute""" + + results = execute_query_with_pagination(query, minute_start_time, minute_end_time, client, log_group) + return results + + +def execute_query_for_time_range(query, start_time, end_time, client, log_group): + """Function to execute query for a given time range, minute by minute""" + + all_results = [] + current_time = start_time + + while current_time < end_time: + minute_start_time = current_time + minute_end_time = current_time + (300 * 1000) + results = execute_query_for_minute(query, minute_start_time, minute_end_time, client, log_group) + all_results.extend(results) + current_time = minute_end_time + + return all_results + + +def process_items(items): + """Function to process a collection for stats""" + filtered_items = [x for x in items if x is not None] + if not filtered_items: + filtered_items = [0] + + minimum = min(filtered_items) + maximum = max(filtered_items) + sampled = len(filtered_items) + average = round(sum(filtered_items) / sampled, 1) + median = statistics.median(filtered_items) + + return minimum, maximum, average, median, sampled + + +def parse_arguments(): + """Parse command-line arguments.""" + parser = argparse.ArgumentParser(description="Analyze AWS Lambda logs.") + parser.add_argument('--aws_lambda_log', type=str, help="Lambda log to profile", required=True) + parser.add_argument('--aws_profile', type=str, help="AWS profile to use", required=True) + parser.add_argument('--start_time', type=int, help="Start time (hours ago) to analyze", default=1) + return parser.parse_args() + + +def setup_aws_client(profile_name): + """Set up AWS boto3 client for CloudWatch Logs.""" + session = boto3.Session(profile_name=profile_name) + return session.client('logs') + + +def compile_patterns(): + """Compile and return regex patterns.""" + request_id_pattern = re.compile(r"RequestId: (\S+)") + memory_used_pattern = re.compile(r"Max Memory Used: (\d+) MB") + billed_duration_pattern = re.compile(r"Billed Duration: (\d+) ms") + return request_id_pattern, memory_used_pattern, billed_duration_pattern + + +def execute_combined_query(client, log_group_name, start_time, end_time): + """Execute a combined query on CloudWatch Logs.""" + combined_query = """ + fields @timestamp, @message + | filter (@message like /Max Memory Used:/ or @message like /aws_request_id/) + """ + return execute_query_for_time_range(combined_query, start_time, end_time, client, log_group_name) + + +def process_results(response_query, request_id_pattern, memory_used_pattern, billed_duration_pattern): + """Process results from the CloudWatch Logs query.""" + request_collection = {} + + for result in response_query: + text = result[1]['value'] + + if 'aws_request_id' in text: + process_aws_request_id(text, request_collection) + elif 'Max Memory Used:' in text: + process_max_memory_used(text, request_id_pattern, memory_used_pattern, billed_duration_pattern, request_collection) + + return request_collection + + +def process_aws_request_id(text, request_collection): + """Process and update request collection for 
aws_request_id.""" + try: + message = json.loads(json.loads(text).get('message', '{}')) + request_id = message.get('aws_request_id') + collection = message.get('collection') + + if request_id: + request_collection.setdefault(request_id, {}).update({ + "request_id": request_id, + "collection": collection + }) + except (json.JSONDecodeError, TypeError): + pass + + +def process_max_memory_used(text, request_id_pattern, memory_used_pattern, billed_duration_pattern, request_collection): + """Process and update request collection for Max Memory Used.""" + request_id_match = request_id_pattern.search(text) + memory_used_match = memory_used_pattern.search(text) + billed_duration_match = billed_duration_pattern.search(text) + + if request_id_match and memory_used_match and billed_duration_match: + request_id = request_id_match.group(1) + memory_used = int(memory_used_match.group(1)) + billed_duration = int(billed_duration_match.group(1)) + + request_collection.setdefault(request_id, {}).update({ + "memory_used": memory_used, + "billed_duration": billed_duration + }) + + +def update_memory_billed_collections(request_collection): + """Update memory and billed collections from request_collection.""" + memory_collection = defaultdict(list) + billed_collection = defaultdict(list) + + for item in request_collection.values(): + collection = item.get('collection') + if collection: + memory_collection[collection].append(item.get('memory_used')) + billed_collection[collection].append(item.get('billed_duration')) + + return memory_collection, billed_collection + + +def write_csv(memory_collection, billed_collection): + """Write collection statistics to a CSV file.""" + csv_filename = "collection_statistics.csv" + header = [ + "Collection", "Mem Max", "Mem Min", "Mem Med", "Mem Avg", + "Bill Max", "Bill Min", "Bill Med", "Bill Avg", "Sampled" + ] + + with open(csv_filename, mode='w', newline='') as file: # pylint: disable=unspecified-encoding + writer = csv.writer(file) + writer.writerow(header) + + for key in sorted(memory_collection.keys()): + item = memory_collection.get(key, []) + bill_item = billed_collection.get(key, []) + + minimum, maximum, average, median, sampled = process_items(item) + bill_min, bill_max, bill_avg, bill_med, _ = process_items(bill_item) + + row = [ + key, maximum, minimum, median, average, + bill_max, bill_min, bill_med, bill_avg, sampled + ] + writer.writerow(row) + + +def main(): + """Main function for the script.""" + args = parse_arguments() + client = setup_aws_client(args.aws_profile) + + request_id_pattern, memory_used_pattern, billed_duration_pattern = compile_patterns() + + start_time = int((time.time() - args.start_time * 3600) * 1000) + end_time = int((time.time()) * 1000) + + response_query = execute_combined_query(client, args.aws_lambda_log, start_time, end_time) + + request_collection = process_results( + response_query, request_id_pattern, memory_used_pattern, billed_duration_pattern + ) + + memory_collection, billed_collection = update_memory_billed_collections(request_collection) + + write_csv(memory_collection, billed_collection) + + +if __name__ == "__main__": + main() diff --git a/podaac/hitide_backfill_tool/regression.py b/podaac/hitide_backfill_tool/regression.py new file mode 100644 index 0000000..e925f7b --- /dev/null +++ b/podaac/hitide_backfill_tool/regression.py @@ -0,0 +1,63 @@ +""" +============== +regression.py +============== + +Test TIG on all our collections. 
+"""
+import argparse
+import os
+import subprocess
+import requests
+
+
+def make_cli_call(command):
+    """Function to make cli calls"""
+    try:
+        output = subprocess.check_output(command, shell=True, stderr=subprocess.STDOUT)
+        return output.decode("utf-8")  # Decoding the output bytes to string
+    except subprocess.CalledProcessError as e:
+        return e.output.decode("utf-8")  # Decoding the error output bytes to string
+
+
+def download_configs(config_dir):
+    """Function to download all the forge tig configs from github"""
+    os.makedirs(config_dir, exist_ok=True)
+
+    print("..... downloading configuration files")
+    api_url = "https://api.github.com/repos/podaac/forge-tig-configuration/contents/config-files"
+    response = requests.get(api_url, timeout=60)
+
+    if response.status_code == 200:
+        for file in response.json():
+            url = file.get('download_url')
+            config_file = requests.get(url, timeout=60)
+            local_filename = file.get('name')
+            local_path = os.path.join(config_dir, local_filename)
+            with open(local_path, 'wb') as out_file:
+                out_file.write(config_file.content)
+
+
+def main():
+    """main function for regression"""
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--backfill_config', type=str,
+                        help="path to backfill config", required=True)
+    args = parser.parse_args()
+
+    test_dir = os.path.dirname(os.path.realpath(__file__))
+    config_directory = f'{test_dir}/dl_configs'
+    download_configs(config_directory)
+
+    files = os.listdir(config_directory)
+    print(files)
+
+    for _file in files:
+        # str.strip('.cfg') would drop leading/trailing 'c', 'f', 'g' and '.'
+        # characters from the name itself, so remove the extension explicitly
+        collection = _file.removesuffix('.cfg')
+        cli_command = f'backfill --config {args.backfill_config} --collection {collection}'
+        result = make_cli_call(cli_command)
+        print(result)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/podaac/hitide_backfill_tool/replay.py b/podaac/hitide_backfill_tool/replay.py
new file mode 100644
index 0000000..88f7105
--- /dev/null
+++ b/podaac/hitide_backfill_tool/replay.py
@@ -0,0 +1,63 @@
+"""Script to get messages off dead letter queue and back into the queue"""
+
+from argparse import ArgumentParser
+import sys
+import boto3
+from podaac.hitide_backfill_tool.file_util import load_yaml_file
+
+
+def replay(profile, dlq_url, sqs_url):
+    """Function to get messages off dead letter queue to the sqs"""
+
+    # Create a Boto3 session with the specified profile
+    session = boto3.Session(profile_name=profile)
+
+    # Create an SQS client using the session
+    sqs = session.client('sqs')
+
+    # Retrieve and move messages from the DLQ to the regular queue
+    while True:
+        response = sqs.receive_message(
+            QueueUrl=dlq_url,
+            MaxNumberOfMessages=1,  # Number of messages to retrieve at a time
+        )
+        messages = response.get('Messages', [])
+
+        if not messages:
+            break
+
+        for message in messages:
+            # Send the message back to the regular queue
+            sqs.send_message(QueueUrl=sqs_url, MessageBody=message['Body'])
+
+            # Delete the message from the DLQ
+            sqs.delete_message(QueueUrl=dlq_url, ReceiptHandle=message['ReceiptHandle'])
+
+
+def main(args=None):
+    """main function to get arguments and call replay functions"""
+
+    if args is None:
+        args = sys.argv[1:]
+    elif isinstance(args, str):
+        args = args.split()
+
+    parser = ArgumentParser()
+    parser.add_argument("--config", required=True)
+
+    args = parser.parse_args(args)
+
+    config = load_yaml_file(args.config)
+
+    aws_profile = config.get('aws_profile')
+    dlq_url = config.get('dlq_url')
+    sqs_url = config.get('sqs_url')
+
+    if aws_profile is None or dlq_url is None or sqs_url is None:
+        print("Please include aws_profile, dlq_url, and sqs_url in 
diff --git a/podaac/hitide_backfill_tool/s3_reader.py b/podaac/hitide_backfill_tool/s3_reader.py
new file mode 100644
index 0000000..10e3db6
--- /dev/null
+++ b/podaac/hitide_backfill_tool/s3_reader.py
@@ -0,0 +1,47 @@
+"""Read files from S3"""
+
+from urllib.parse import urlparse
+import multiprocessing
+from botocore.client import Config
+from botocore.exceptions import ClientError
+import boto3
+
+
+class S3Reader:
+    """Read files from S3"""
+
+    def __init__(self, logger, aws_profile):
+        """Create S3Reader"""
+        logger.info("Checking S3 settings")
+        config = Config(max_pool_connections=multiprocessing.cpu_count() * 4)
+        if aws_profile:
+            self.client = boto3.session.Session(
+                profile_name=aws_profile).client('s3', config=config)
+        else:
+            self.client = boto3.client('s3', config=config)
+
+        self.logger = logger
+
+        # check access to s3
+        try:
+            self.client.list_buckets()
+            logger.debug("S3Reader able to access S3")
+        except ClientError as exc:
+            raise Exception("S3Reader couldn't connect to S3") from exc
+
+    def extract_bucket_and_file(self, s3_path):
+        """Returns bucket_name and key from s3 path."""
+
+        url = urlparse(s3_path, allow_fragments=False)
+        return url.netloc, url.path.lstrip('/')
+
+    def read_file_from_s3(self, s3_path):
+        """Returns contents of file from S3 path. Assumes contents are in ISO-8859-1 encoding."""
+
+        try:
+            bucket_name, file_name = self.extract_bucket_and_file(s3_path)
+            response = self.client.get_object(Bucket=bucket_name, Key=file_name)
+
+            return response["Body"].read().decode("ISO-8859-1")
+        except ClientError as exc:
+            raise Exception(f"S3Reader could not read file at {s3_path}.") from exc
diff --git a/podaac/hitide_backfill_tool/sns_message_sender.py b/podaac/hitide_backfill_tool/sns_message_sender.py
new file mode 100644
index 0000000..2ccc692
--- /dev/null
+++ b/podaac/hitide_backfill_tool/sns_message_sender.py
@@ -0,0 +1,101 @@
+"""Send messages to a file or SNS"""
+
+import sys
+import logging
+import multiprocessing
+from botocore.client import Config
+from botocore.exceptions import ClientError
+import boto3
+
+from podaac.hitide_backfill_tool.file_util import make_absolute
+
+
+class SnsMessageSender:
+    """Send messages to SNS"""
+
+    name = "sns"
+
+    def __init__(self, topic_arn, logger, aws_profile):
+        """Create SnsMessageSender"""
+        logger.info("Checking SNS settings")
+        config = Config(max_pool_connections=multiprocessing.cpu_count() * 4)
+        if aws_profile:
+            self.client = boto3.session.Session(
+                profile_name=aws_profile).client('sns', config=config)
+        else:
+            self.client = boto3.client('sns', config=config)
+
+        # check access to sns
+        try:
+            self.client.list_topics()
+            logger.debug("SnsMessageSender able to connect to SNS")
+        except ClientError as exc:
+            raise Exception("SnsMessageSender couldn't connect to SNS") from exc
+
+        # check "connection" to sns topic
+        try:
+            self.client.get_topic_attributes(
+                TopicArn=topic_arn
+            )
+            logger.debug(
+                f"SnsMessageSender able to access SNS topic: {topic_arn}")
+        except ClientError as exc:
+            raise Exception(
+                f"SnsMessageSender given invalid topic_arn: {topic_arn}") from exc
+
+        self.topic_arn = topic_arn
+        self.logger = logger
+        self.messages_sent = 0
+
+    def send(self, message):
+        """Send message to SNS topic"""
+        try:
+            self.client.publish(
+                TopicArn=self.topic_arn,
+                Message=message
+            )
+            self.messages_sent += 1
+        except ClientError as exc:
+            self.logger.error(f"""
+                SNS Message Sender Failure
+                
{exc} + ------------- + {message} + """) + + def close(self): + """Release resources""" + + +class FileMessageSender: + """Send messages to file""" + + name = "file" + messages_sent = 0 + + def __init__(self, filename=None, logger=logging): + """Create FileMessageSender""" + self.logger = logger + + if filename is None: + self.file = None + + elif filename == "stdout": + self.file = sys.stdout + + else: + abs_path = make_absolute(filename) + # pylint: disable=consider-using-with; need file to stay open for lifetime of object + self.file = open(abs_path, 'a', encoding="utf-8") + + def send(self, message): + """Send message to file""" + if not self.file: + return + self.file.write(f"\n{message}\n") + self.messages_sent += 1 + + def close(self): + """Release resources""" + if self.file and self.file != sys.stdout: + self.file.close() diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..f6cf613 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1243 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "astroid" +version = "3.2.4" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, + {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "boto3" +version = "1.35.13" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.35.13-py3-none-any.whl", hash = "sha256:6e220eae161a4c0ed21e2561edcb0fd9603fa621692c50bc099db318ed3e3ad4"}, + {file = "boto3-1.35.13.tar.gz", hash = "sha256:4af17bd7bada591ddaa835d774b242705210e5d45133e25bd73417daa42e53e7"}, +] + +[package.dependencies] +botocore = ">=1.35.13,<1.36.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.35.13" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.35.13-py3-none-any.whl", hash = "sha256:dd8a8bb1946187c8eb902a3b856d3b24df63917e4f2c61a6bce7f3ea9f112761"}, + {file = "botocore-1.35.13.tar.gz", hash = "sha256:f7ae62eab44d731a5ad8917788378316c79c7bceb530a8307ed0f3bca7037341"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.21.2)"] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash 
= "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", 
hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.6.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = 
"coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = 
"coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "43.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dill" +version = "0.3.8" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "flake8" +version = "7.1.1" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, + {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.12.0,<2.13.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "fuzzywuzzy" +version = "0.18.0" +description = "Fuzzy string matching in python" +optional = false +python-versions = "*" +files = [ + {file = "fuzzywuzzy-0.18.0-py2.py3-none-any.whl", hash = "sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993"}, + {file = "fuzzywuzzy-0.18.0.tar.gz", hash = "sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8"}, +] + +[package.extras] +speedup = ["python-levenshtein (>=0.12)"] + +[[package]] +name = "idna" +version = "3.8" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = 
"sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "levenshtein" +version = "0.25.1" +description = "Python extension for computing string edit distances and similarities." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Levenshtein-0.25.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:eb4d1ec9f2dcbde1757c4b7fb65b8682bc2de45b9552e201988f287548b7abdf"}, + {file = "Levenshtein-0.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4d9fa3affef48a7e727cdbd0d9502cd060da86f34d8b3627edd769d347570e2"}, + {file = "Levenshtein-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1b6cd186e58196ff8b402565317e9346b408d0c04fa0ed12ce4868c0fcb6d03"}, + {file = "Levenshtein-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82637ef5428384dd1812849dd7328992819bf0c4a20bff0a3b3ee806821af7ed"}, + {file = "Levenshtein-0.25.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e73656da6cc3e32a6e4bcd48562fcb64599ef124997f2c91f5320d7f1532c069"}, + {file = "Levenshtein-0.25.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5abff796f92cdfba69b9cbf6527afae918d0e95cbfac000bd84017f74e0bd427"}, + {file = "Levenshtein-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38827d82f2ca9cb755da6f03e686866f2f411280db005f4304272378412b4cba"}, + {file = "Levenshtein-0.25.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b989df1e3231261a87d68dfa001a2070771e178b09650f9cf99a20e3d3abc28"}, + {file = "Levenshtein-0.25.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2011d3b3897d438a2f88ef7aed7747f28739cae8538ec7c18c33dd989930c7a0"}, + {file = "Levenshtein-0.25.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6c375b33ec7acc1c6855e8ee8c7c8ac6262576ffed484ff5c556695527f49686"}, + {file = "Levenshtein-0.25.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ce0cb9dd012ef1bf4d5b9d40603e7709b6581aec5acd32fcea9b371b294ca7aa"}, + {file = "Levenshtein-0.25.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9da9ecb81bae67d784defed7274f894011259b038ec31f2339c4958157970115"}, + {file = "Levenshtein-0.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3bd7be5dbe5f4a1b691f381e39512927b39d1e195bd0ad61f9bf217a25bf36c9"}, + {file = "Levenshtein-0.25.1-cp310-cp310-win32.whl", hash = "sha256:f6abb9ced98261de67eb495b95e1d2325fa42b0344ed5763f7c0f36ee2e2bdba"}, + {file = "Levenshtein-0.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:97581af3e0a6d359af85c6cf06e51f77f4d635f7109ff7f8ed7fd634d8d8c923"}, + {file = "Levenshtein-0.25.1-cp310-cp310-win_arm64.whl", hash = "sha256:9ba008f490788c6d8d5a10735fcf83559965be97e4ef0812db388a84b1cc736a"}, + {file = "Levenshtein-0.25.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f57d9cf06dac55c2d2f01f0d06e32acc074ab9a902921dc8fddccfb385053ad5"}, + {file = "Levenshtein-0.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:22b60c6d791f4ca67a3686b557ddb2a48de203dae5214f220f9dddaab17f44bb"}, + {file = "Levenshtein-0.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d0444ee62eccf1e6cedc7c5bc01a9face6ff70cc8afa3f3ca9340e4e16f601a4"}, + {file = "Levenshtein-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e8758be8221a274c83924bae8dd8f42041792565a3c3bdd3c10e3f9b4a5f94e"}, + {file = "Levenshtein-0.25.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:147221cfb1d03ed81d22fdd2a4c7fc2112062941b689e027a30d2b75bbced4a3"}, + {file = "Levenshtein-0.25.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a454d5bc4f4a289f5471418788517cc122fcc00d5a8aba78c54d7984840655a2"}, + {file = "Levenshtein-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c25f3778bbac78286bef2df0ca80f50517b42b951af0a5ddaec514412f79fac"}, + {file = "Levenshtein-0.25.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:181486cf465aff934694cc9a19f3898a1d28025a9a5f80fc1608217e7cd1c799"}, + {file = "Levenshtein-0.25.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8db9f672a5d150706648b37b044dba61f36ab7216c6a121cebbb2899d7dfaa3"}, + {file = "Levenshtein-0.25.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f2a69fe5ddea586d439f9a50d0c51952982f6c0db0e3573b167aa17e6d1dfc48"}, + {file = "Levenshtein-0.25.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:3b684675a3bd35efa6997856e73f36c8a41ef62519e0267dcbeefd15e26cae71"}, + {file = "Levenshtein-0.25.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:cc707ef7edb71f6bf8339198b929ead87c022c78040e41668a4db68360129cef"}, + {file = "Levenshtein-0.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:41512c436b8c691326e2d07786d906cba0e92b5e3f455bf338befb302a0ca76d"}, + {file = "Levenshtein-0.25.1-cp311-cp311-win32.whl", hash = "sha256:2a3830175c01ade832ba0736091283f14a6506a06ffe8c846f66d9fbca91562f"}, + {file = "Levenshtein-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:9e0af4e6e023e0c8f79af1d1ca5f289094eb91201f08ad90f426d71e4ae84052"}, + {file = "Levenshtein-0.25.1-cp311-cp311-win_arm64.whl", hash = "sha256:38e5d9a1d737d7b49fa17d6a4c03a0359288154bf46dc93b29403a9dd0cd1a7d"}, + {file = "Levenshtein-0.25.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4a40fa16ecd0bf9e557db67131aabeea957f82fe3e8df342aa413994c710c34e"}, + {file = "Levenshtein-0.25.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4f7d2045d5927cffa65a0ac671c263edbfb17d880fdce2d358cd0bda9bcf2b6d"}, + {file = "Levenshtein-0.25.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40f96590539f9815be70e330b4d2efcce0219db31db5a22fffe99565192f5662"}, + {file = "Levenshtein-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d78512dd25b572046ff86d8903bec283c373063349f8243430866b6a9946425"}, + {file = "Levenshtein-0.25.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c161f24a1b216e8555c874c7dd70c1a0d98f783f252a16c9face920a8b8a6f3e"}, + {file = "Levenshtein-0.25.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:06ebbfd010a00490795f478d18d7fa2ffc79c9c03fc03b678081f31764d16bab"}, + {file = "Levenshtein-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa9ec0a4489ebfb25a9ec2cba064ed68d0d2485b8bc8b7203f84a7874755e0f"}, + {file = "Levenshtein-0.25.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26408938a6db7b252824a701545d50dc9cdd7a3e4c7ee70834cca17953b76ad8"}, + {file = "Levenshtein-0.25.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:330ec2faff957281f4e6a1a8c88286d1453e1d73ee273ea0f937e0c9281c2156"}, + {file = "Levenshtein-0.25.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9115d1b08626dfdea6f3955cb49ba5a578f7223205f80ead0038d6fc0442ce13"}, + {file = "Levenshtein-0.25.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:bbd602edab758e93a5c67bf0d8322f374a47765f1cdb6babaf593a64dc9633ad"}, + {file = "Levenshtein-0.25.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:b930b4df32cd3aabbed0e9f0c4fdd1ea4090a5c022ba9f1ae4ab70ccf1cf897a"}, + {file = "Levenshtein-0.25.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dd66fb51f88a3f73a802e1ff19a14978ddc9fbcb7ce3a667ca34f95ef54e0e44"}, + {file = "Levenshtein-0.25.1-cp312-cp312-win32.whl", hash = "sha256:386de94bd1937a16ae3c8f8b7dd2eff1b733994ecf56ce4d05dfdd0e776d0261"}, + {file = "Levenshtein-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:9ee1902153d47886c9787598a4a5c324ce7fde44d44daa34fcf3652ac0de21bc"}, + {file = "Levenshtein-0.25.1-cp312-cp312-win_arm64.whl", hash = "sha256:b56a7e7676093c3aee50402226f4079b15bd21b5b8f1820f9d6d63fe99dc4927"}, + {file = "Levenshtein-0.25.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6b5dfdf6a0e2f35fd155d4c26b03398499c24aba7bc5db40245789c46ad35c04"}, + {file = "Levenshtein-0.25.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:355ff797f704459ddd8b95354d699d0d0642348636c92d5e67b49be4b0e6112b"}, + {file = "Levenshtein-0.25.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:933b827a3b721210fff522f3dca9572f9f374a0e88fa3a6c7ee3164406ae7794"}, + {file = "Levenshtein-0.25.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be1da669a240f272d904ab452ad0a1603452e190f4e03e886e6b3a9904152b89"}, + {file = "Levenshtein-0.25.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:265cbd78962503a26f2bea096258a3b70b279bb1a74a525c671d3ee43a190f9c"}, + {file = "Levenshtein-0.25.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63cc4d53a35e673b12b721a58b197b4a65734688fb72aa1987ce63ed612dca96"}, + {file = "Levenshtein-0.25.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75fee0c471b8799c70dad9d0d5b70f1f820249257f9617601c71b6c1b37bee92"}, + {file = "Levenshtein-0.25.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:045d6b0db124fbd37379b2b91f6d0786c2d9220e7a848e2dd31b99509a321240"}, + {file = "Levenshtein-0.25.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:db7a2e9c51ac9cc2fd5679484f1eac6e0ab2085cb181240445f7fbf10df73230"}, + {file = "Levenshtein-0.25.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c379c588aa0d93d4607db7eb225fd683263d49669b1bbe49e28c978aa6a4305d"}, + {file = "Levenshtein-0.25.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:966dd00424df7f69b78da02a29b530fbb6c1728e9002a2925ed7edf26b231924"}, + {file = "Levenshtein-0.25.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:09daa6b068709cc1e68b670a706d928ed8f0b179a26161dd04b3911d9f757525"}, + {file = "Levenshtein-0.25.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d6bed0792635081accf70a7e11cfece986f744fddf46ce26808cd8bfc067e430"}, + {file = "Levenshtein-0.25.1-cp38-cp38-win32.whl", hash = "sha256:28e7b7faf5a745a690d1b1706ab82a76bbe9fa6b729d826f0cfdd24fd7c19740"}, + {file = "Levenshtein-0.25.1-cp38-cp38-win_amd64.whl", hash = "sha256:8ca0cc9b9e07316b5904f158d5cfa340d55b4a3566ac98eaac9f087c6efb9a1a"}, + {file = "Levenshtein-0.25.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:45682cdb3ac4a5465c01b2dce483bdaa1d5dcd1a1359fab37d26165b027d3de2"}, + {file = "Levenshtein-0.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f8dc3e63c4cd746ec162a4cd744c6dde857e84aaf8c397daa46359c3d54e6219"}, + {file = "Levenshtein-0.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:01ad1eb09933a499a49923e74e05b1428ca4ef37fed32965fef23f1334a11563"}, + {file = "Levenshtein-0.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cbb4e8c4b8b7bbe0e1aa64710b806b6c3f31d93cb14969ae2c0eff0f3a592db8"}, + {file = "Levenshtein-0.25.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48d1fe224b365975002e3e2ea947cbb91d2936a16297859b71c4abe8a39932c"}, + {file = "Levenshtein-0.25.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a164df16d876aab0a400f72aeac870ea97947ea44777c89330e9a16c7dc5cc0e"}, + {file = "Levenshtein-0.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:995d3bcedcf64be6ceca423f6cfe29184a36d7c4cbac199fdc9a0a5ec7196cf5"}, + {file = "Levenshtein-0.25.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdaf62d637bef6711d6f3457e2684faab53b2db2ed53c05bc0dc856464c74742"}, + {file = "Levenshtein-0.25.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:af9de3b5f8f5f3530cfd97daab9ab480d1b121ef34d8c0aa5bab0c645eae219e"}, + {file = "Levenshtein-0.25.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:78fba73c352383b356a30c4674e39f086ffef7122fa625e7550b98be2392d387"}, + {file = "Levenshtein-0.25.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:9e0df0dcea3943321398f72e330c089b5d5447318310db6f17f5421642f3ade6"}, + {file = "Levenshtein-0.25.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:387f768bb201b9bc45f0f49557e2fb9a3774d9d087457bab972162dcd4fd352b"}, + {file = "Levenshtein-0.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5dcf931b64311039b43495715e9b795fbd97ab44ba3dd6bf24360b15e4e87649"}, + {file = "Levenshtein-0.25.1-cp39-cp39-win32.whl", hash = "sha256:2449f8668c0bd62a2b305a5e797348984c06ac20903b38b3bab74e55671ddd51"}, + {file = "Levenshtein-0.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:28803fd6ec7b58065621f5ec0d24e44e2a7dc4842b64dcab690cb0a7ea545210"}, + {file = "Levenshtein-0.25.1-cp39-cp39-win_arm64.whl", hash = "sha256:0b074d452dff8ee86b5bdb6031aa32bb2ed3c8469a56718af5e010b9bb5124dc"}, + {file = "Levenshtein-0.25.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e9e060ef3925a68aeb12276f0e524fb1264592803d562ec0306c7c3f5c68eae0"}, + {file = "Levenshtein-0.25.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f84b84049318d44722db307c448f9dcb8d27c73525a378e901189a94889ba61"}, + {file = "Levenshtein-0.25.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07e23fdf330cb185a0c7913ca5bd73a189dfd1742eae3a82e31ed8688b191800"}, + {file = "Levenshtein-0.25.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d06958e4a81ea0f0b2b7768a2ad05bcd50a9ad04c4d521dd37d5730ff12decdc"}, + {file = "Levenshtein-0.25.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2ea7c34ec22b2fce21299b0caa6dde6bdebafcc2970e265853c9cfea8d1186da"}, + {file = "Levenshtein-0.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fddc0ccbdd94f57aa32e2eb3ac8310d08df2e175943dc20b3e1fc7a115850af4"}, + {file = "Levenshtein-0.25.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d52249cb3448bfe661d3d7db3a6673e835c7f37b30b0aeac499a1601bae873d"}, + {file = "Levenshtein-0.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8dd4c201b15f8c1e612f9074335392c8208ac147acbce09aff04e3974bf9b16"}, + {file = "Levenshtein-0.25.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23a4d95ce9d44161c7aa87ab76ad6056bc1093c461c60c097054a46dc957991f"}, + 
{file = "Levenshtein-0.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:65eea8a9c33037b23069dca4b3bc310e3c28ca53f60ec0c958d15c0952ba39fa"}, + {file = "Levenshtein-0.25.1.tar.gz", hash = "sha256:2df14471c778c75ffbd59cb64bbecfd4b0ef320ef9f80e4804764be7d5678980"}, +] + +[package.dependencies] +rapidfuzz = ">=3.8.0,<4.0.0" + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "moto" +version = "4.2.14" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "moto-4.2.14-py2.py3-none-any.whl", hash = "sha256:6d242dbbabe925bb385ddb6958449e5c827670b13b8e153ed63f91dbdb50372c"}, + {file = "moto-4.2.14.tar.gz", hash = 
"sha256:8f9263ca70b646f091edcc93e97cda864a542e6d16ed04066b1370ed217bd190"}, +] + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.12.201" +cryptography = ">=3.3.1" +Jinja2 = ">=2.10.1" +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.13.0" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +apigatewayv2 = ["PyYAML (>=5.1)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.0)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.0)"] +ec2 = ["sshpubkeys (>=3.1.0)"] +glue = ["pyparsing (>=3.0.7)"] +iotdata = ["jsondiff (>=1.1.2)"] +proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.0)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.0)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +ssm = ["PyYAML (>=5.1)"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycodestyle" +version = "2.12.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pylint" +version = "3.2.7" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.2.7-py3-none-any.whl", hash = "sha256:02f4aedeac91be69fb3b4bea997ce580a4ac68ce58b89eaefeaf06749df73f4b"}, + {file = "pylint-3.2.7.tar.gz", hash = "sha256:1b7a721b575eaeaa7d39db076b6e7743c993ea44f57979127c517c6c572c803e"}, +] + +[package.dependencies] +astroid = ">=3.2.4,<=3.3.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pytest" +version = "8.3.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-levenshtein" +version = "0.25.1" +description = "Python extension for computing string edit distances and similarities." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "python-Levenshtein-0.25.1.tar.gz", hash = "sha256:b21e7efe83c8e8dc8260f2143b2393c6c77cb2956f0c53de6c4731c4d8006acc"}, + {file = "python_Levenshtein-0.25.1-py3-none-any.whl", hash = "sha256:654446d1ea4acbcc573d44c43775854834a7547e4cb2f79f638f738134d72037"}, +] + +[package.dependencies] +Levenshtein = "0.25.1" + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "rapidfuzz" +version = "3.9.7" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rapidfuzz-3.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ccf68e30b80e903f2309f90a438dbd640dd98e878eeb5ad361a288051ee5b75c"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:696a79018ef989bf1c9abd9005841cee18005ccad4748bad8a4c274c47b6241a"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4eebf6c93af0ae866c22b403a84747580bb5c10f0d7b51c82a87f25405d4dcb"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e9125377fa3d21a8abd4fbdbcf1c27be73e8b1850f0b61b5b711364bf3b59db"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c12d180b17a22d107c8747de9c68d0b9c1d15dcda5445ff9bf9f4ccfb67c3e16"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1318d42610c26dcd68bd3279a1bf9e3605377260867c9a8ed22eafc1bd93a7c"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5fa6e3c6e0333051c1f3a49f0807b3366f4131c8d6ac8c3e05fd0d0ce3755c"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fcf79b686962d7bec458a0babc904cb4fa319808805e036b9d5a531ee6b9b835"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8b01153c7466d0bad48fba77a303d5a768e66f24b763853469f47220b3de4661"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:94baaeea0b4f8632a6da69348b1e741043eba18d4e3088d674d3f76586b6223d"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6c5b32875646cb7f60c193ade99b2e4b124f19583492115293cd00f6fb198b17"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:110b6294396bc0a447648627479c9320f095c2034c0537f687592e0f58622638"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-win32.whl", hash = 
"sha256:3445a35c4c8d288f2b2011eb61bce1227c633ce85a3154e727170f37c0266bb2"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:0d1415a732ee75e74a90af12020b77a0b396b36c60afae1bde3208a78cd2c9fc"}, + {file = "rapidfuzz-3.9.7-cp310-cp310-win_arm64.whl", hash = "sha256:836f4d88b8bd0fff2ebe815dcaab8aa6c8d07d1d566a7e21dd137cf6fe11ed5b"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d098ce6162eb5e48fceb0745455bc950af059df6113eec83e916c129fca11408"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:048d55d36c02c6685a2b2741688503c3d15149694506655b6169dcfd3b6c2585"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c33211cfff9aec425bb1bfedaf94afcf337063aa273754f22779d6dadebef4c2"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6d9db2fa4e9be171e9bb31cf2d2575574774966b43f5b951062bb2e67885852"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4e049d5ad61448c9a020d1061eba20944c4887d720c4069724beb6ea1692507"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cfa74aac64c85898b93d9c80bb935a96bf64985e28d4ee0f1a3d1f3bf11a5106"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965693c2e9efd425b0f059f5be50ef830129f82892fa1858e220e424d9d0160f"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8501000a5eb8037c4b56857724797fe5a8b01853c363de91c8d0d0ad56bef319"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d92c552c6b7577402afdd547dcf5d31ea6c8ae31ad03f78226e055cfa37f3c6"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1ee2086f490cb501d86b7e386c1eb4e3a0ccbb0c99067089efaa8c79012c8952"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1de91e7fd7f525e10ea79a6e62c559d1b0278ec097ad83d9da378b6fab65a265"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4da514d13f4433e16960a17f05b67e0af30ac771719c9a9fb877e5004f74477"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-win32.whl", hash = "sha256:a40184c67db8252593ec518e17fb8a6e86d7259dc9f2d6c0bf4ff4db8cf1ad4b"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:c4f28f1930b09a2c300357d8465b388cecb7e8b2f454a5d5425561710b7fd07f"}, + {file = "rapidfuzz-3.9.7-cp311-cp311-win_arm64.whl", hash = "sha256:675b75412a943bb83f1f53e2e54fd18c80ef15ed642dc6eb0382d1949419d904"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1ef6a1a8f0b12f8722f595f15c62950c9a02d5abc64742561299ffd49f6c6944"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32532af1d70c6ec02ea5ac7ee2766dfff7c8ae8c761abfe8da9e527314e634e8"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1a38bade755aa9dd95a81cda949e1bf9cd92b79341ccc5e2189c9e7bdfc5ec"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73ee2df41224c87336448d279b5b6a3a75f36e41dd3dcf538c0c9cce36360d8"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be3a1fc3e2ab3bdf93dc0c83c00acca8afd2a80602297d96cf4a0ba028333cdf"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:603f48f621272a448ff58bb556feb4371252a02156593303391f5c3281dfaeac"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:268f8e1ca50fc61c0736f3fe9d47891424adf62d96ed30196f30f4bd8216b41f"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f8bf3f0d02935751d8660abda6044821a861f6229f7d359f98bcdcc7e66c39b"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b997ff3b39d4cee9fb025d6c46b0a24bd67595ce5a5b652a97fb3a9d60beb651"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca66676c8ef6557f9b81c5b2b519097817a7c776a6599b8d6fcc3e16edd216fe"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:35d3044cb635ca6b1b2b7b67b3597bd19f34f1753b129eb6d2ae04cf98cd3945"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a93c9e60904cb76e7aefef67afffb8b37c4894f81415ed513db090f29d01101"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-win32.whl", hash = "sha256:579d107102c0725f7c79b4e79f16d3cf4d7c9208f29c66b064fa1fd4641d5155"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-win_amd64.whl", hash = "sha256:953b3780765c8846866faf891ee4290f6a41a6dacf4fbcd3926f78c9de412ca6"}, + {file = "rapidfuzz-3.9.7-cp312-cp312-win_arm64.whl", hash = "sha256:7c20c1474b068c4bd45bf2fd0ad548df284f74e9a14a68b06746c56e3aa8eb70"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fde81b1da9a947f931711febe2e2bee694e891f6d3e6aa6bc02c1884702aea19"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47e92c155a14f44511ea8ebcc6bc1535a1fe8d0a7d67ad3cc47ba61606df7bcf"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8772b745668260c5c4d069c678bbaa68812e6c69830f3771eaad521af7bc17f8"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578302828dd97ee2ba507d2f71d62164e28d2fc7bc73aad0d2d1d2afc021a5d5"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc3e6081069eea61593f1d6839029da53d00c8c9b205c5534853eaa3f031085c"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b1c2d504eddf97bc0f2eba422c8915576dbf025062ceaca2d68aecd66324ad9"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb76e5a21034f0307c51c5a2fc08856f698c53a4c593b17d291f7d6e9d09ca3"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d4ba2318ef670ce505f42881a5d2af70f948124646947341a3c6ccb33cd70369"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:057bb03f39e285047d7e9412e01ecf31bb2d42b9466a5409d715d587460dd59b"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a8feac9006d5c9758438906f093befffc4290de75663dbb2098461df7c7d28dd"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95b8292383e717e10455f2c917df45032b611141e43d1adf70f71b1566136b11"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e9fbf659537d246086d0297628b3795dc3e4a384101ecc01e5791c827b8d7345"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-win32.whl", hash = "sha256:1dc516ac6d32027be2b0196bedf6d977ac26debd09ca182376322ad620460feb"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-win_amd64.whl", hash = 
"sha256:b4f86e09d3064dca0b014cd48688964036a904a2d28048f00c8f4640796d06a8"}, + {file = "rapidfuzz-3.9.7-cp313-cp313-win_arm64.whl", hash = "sha256:19c64d8ddb2940b42a4567b23f1681af77f50a5ff6c9b8e85daba079c210716e"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbda3dd68d8b28ccb20ffb6f756fefd9b5ba570a772bedd7643ed441f5793308"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2379e0b2578ad3ac7004f223251550f08bca873ff76c169b09410ec562ad78d8"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d1eff95362f993b0276fd3839aee48625b09aac8938bb0c23b40d219cba5dc5"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd9360e30041690912525a210e48a897b49b230768cc8af1c702e5395690464f"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a93cd834b3c315ab437f0565ee3a2f42dd33768dc885ccbabf9710b131cf70d2"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff196996240db7075f62c7bc4506f40a3c80cd4ae3ab0e79ac6892283a90859"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948dcee7aaa1cd14358b2a7ef08bf0be42bf89049c3a906669874a715fc2c937"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d95751f505a301af1aaf086c19f34536056d6c8efa91b2240de532a3db57b543"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:90db86fa196eecf96cb6db09f1083912ea945c50c57188039392d810d0b784e1"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:3171653212218a162540a3c8eb8ae7d3dcc8548540b69eaecaf3b47c14d89c90"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:36dd6e820379c37a1ffefc8a52b648758e867cd9d78ee5b5dc0c9a6a10145378"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7b702de95666a1f7d5c6b47eacadfe2d2794af3742d63d2134767d13e5d1c713"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-win32.whl", hash = "sha256:9030e7238c0df51aed5c9c5ed8eee2bdd47a2ae788e562c1454af2851c3d1906"}, + {file = "rapidfuzz-3.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:f847fb0fbfb72482b1c05c59cbb275c58a55b73708a7f77a83f8035ee3c86497"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:97f2ce529d2a70a60c290f6ab269a2bbf1d3b47b9724dccc84339b85f7afb044"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e2957fdad10bb83b1982b02deb3604a3f6911a5e545f518b59c741086f92d152"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d5262383634626eb45c536017204b8163a03bc43bda880cf1bdd7885db9a163"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:364587827d7cbd41afa0782adc2d2d19e3f07d355b0750a02a8e33ad27a9c368"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecc24af7f905f3d6efb371a01680116ffea8d64e266618fb9ad1602a9b4f7934"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dc86aa6b29d174713c5f4caac35ffb7f232e3e649113e8d13812b35ab078228"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3dcfbe7266e74a707173a12a7b355a531f2dcfbdb32f09468e664330da14874"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:b23806fbdd6b510ba9ac93bb72d503066263b0fba44b71b835be9f063a84025f"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5551d68264c1bb6943f542da83a4dc8940ede52c5847ef158698799cc28d14f5"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:13d8675a1fa7e2b19650ca7ef9a6ec01391d4bb12ab9e0793e8eb024538b4a34"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9b6a5de507b9be6de688dae40143b656f7a93b10995fb8bd90deb555e7875c60"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:111a20a3c090cf244d9406e60500b6c34b2375ba3a5009e2b38fd806fe38e337"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-win32.whl", hash = "sha256:22589c0b8ccc6c391ce7f776c93a8c92c96ab8d34e1a19f1bd2b12a235332632"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:6f83221db5755b8f34222e40607d87f1176a8d5d4dbda4a55a0f0b67d588a69c"}, + {file = "rapidfuzz-3.9.7-cp39-cp39-win_arm64.whl", hash = "sha256:3665b92e788578c3bb334bd5b5fa7ee1a84bafd68be438e3110861d1578c63a0"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d7df9c2194c7ec930b33c991c55dbd0c10951bd25800c0b7a7b571994ebbced5"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:68bd888eafd07b09585dcc8bc2716c5ecdb7eed62827470664d25588982b2873"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1230e0f9026851a6a432beaa0ce575dda7b39fe689b576f99a0704fbb81fc9c"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3b36e1c61b796ae1777f3e9e11fd39898b09d351c9384baf6e3b7e6191d8ced"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dba13d86806fcf3fe9c9919f58575e0090eadfb89c058bde02bcc7ab24e4548"}, + {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1f1a33e84056b7892c721d84475d3bde49a145126bc4c6efe0d6d0d59cb31c29"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3492c7a42b7fa9f0051d7fcce9893e95ed91c97c9ec7fb64346f3e070dd318ed"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:ece45eb2af8b00f90d10f7419322e8804bd42fb1129026f9bfe712c37508b514"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcd14cf4876f04b488f6e54a7abd3e9b31db5f5a6aba0ce90659917aaa8c088"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:521c58c72ed8a612b25cda378ff10dee17e6deb4ee99a070b723519a345527b9"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18669bb6cdf7d40738526d37e550df09ba065b5a7560f3d802287988b6cb63cf"}, + {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7abe2dbae81120a64bb4f8d3fcafe9122f328c9f86d7f327f174187a5af4ed86"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a3c0783910911f4f24655826d007c9f4360f08107410952c01ee3df98c713eb2"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:03126f9a040ff21d2a110610bfd6b93b79377ce8b4121edcb791d61b7df6eec5"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:591908240f4085e2ade5b685c6e8346e2ed44932cffeaac2fb32ddac95b55c7f"}, + {file = 
"rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9012d86c6397edbc9da4ac0132de7f8ee9d6ce857f4194d5684c4ddbcdd1c5c"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df596ddd3db38aa513d4c0995611267b3946e7cbe5a8761b50e9306dfec720ee"}, + {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3ed5adb752f4308fcc8f4fb6f8eb7aa4082f9d12676fda0a74fa5564242a8107"}, + {file = "rapidfuzz-3.9.7.tar.gz", hash = "sha256:f1c7296534c1afb6f495aa95871f14ccdc197c6db42965854e483100df313030"}, +] + +[package.extras] +full = ["numpy"] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "responses" +version = "0.25.3" +description = "A utility library for mocking out the `requests` Python library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, + {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] + +[[package]] +name = "s3transfer" +version = "0.10.2" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">=3.8" +files = [ + {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, + {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "werkzeug" +version = "3.0.4" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, + {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.4" +files = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "0e692506451d9e6684947b2de9274d017d0f0bdc2f384edc221fd0077473e720" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..e4f16c1 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,45 @@ +[tool.poetry] +name = "podaac-hitide-backfill-tool" +version = "0.9.0-rc.8" + +description = "Backfill footprints and images for HiTIDE collections" +authors = ["PO.DAAC "] +license = "Apache-2.0" +readme = "README.md" +repository = "https://github.com/podaac/hitide-backfill-tool" +classifiers = [ + "Topic :: Software Development :: Libraries :: Python Modules" +] +exclude = ['contrib', 'docs', 'tests', 'jenkins'] +packages = [ + { include = "podaac" }, +] + +[tool.poetry.dependencies] +python = "^3.10" +requests = "^2.27.1" +boto3 = "^1.35.0" +pyyaml = "^6.0" +fuzzywuzzy = "^0.18.0" +python-Levenshtein = "^0.25.0" + +[tool.poetry.dev-dependencies] +pytest = "^8.0.1" +moto = {extras = ["sns"], version = "^4.2.14"} +flake8 = "^7.0.0" +pylint = "^3.0.3" +pytest-cov = "^5" + +[tool.poetry.scripts] +backfill = "podaac.hitide_backfill_tool.cli:main" +backfill_replay = "podaac.hitide_backfill_tool.replay:main" +backfill_regression = 
"podaac.hitide_backfill_tool.regression:main" +backfill_memory_profiler = "podaac.hitide_backfill_tool.memory_profiler:main" +"create-backfill-config" = "podaac.hitide_backfill_tool.config_creator:create_config" + +[tool.pytest.ini_options] +markers = [ "e2e" ] + +[build-system] +requires = ["poetry"] +build-backend = "poetry.masonry.api" diff --git a/terraform-deploy/backfill_lambdas.tf b/terraform-deploy/backfill_lambdas.tf new file mode 100644 index 0000000..91c827f --- /dev/null +++ b/terraform-deploy/backfill_lambdas.tf @@ -0,0 +1,56 @@ +module backfill_lambdas{ + + source = "https://github.com/podaac/hitide-backfill-lambdas/releases/download/0.1.0%2B0993027/hitide-backfill-lambdas-0.1.0+0993027.zip" + lambda_role = aws_iam_role.iam_execution.arn + layers = [aws_lambda_layer_version.cumulus_message_adapter.arn] + prefix = local.resources_name + forge_step_arn = aws_sfn_state_machine.forge.arn + tig_step_arn = aws_sfn_state_machine.tig.arn + dmrpp_step_arn = aws_sfn_state_machine.dmrpp.arn + sqs_url = aws_sqs_queue.sqs-queue.url + region = var.region + aws_profile = var.profile + subnet_ids = data.aws_subnets.private.ids + security_group_ids = concat([var.aws_security_group_ids],[aws_security_group.db.id]) + step_retry = var.step_retry + message_visibility_timeout = var.message_visibility_timeout + reserved_concurrent_executions = 1 + timeout = 900 + + #db variables + user_name = aws_ssm_parameter.db_user.name + user_pass = aws_ssm_parameter.db_user_pass.name + root_user = aws_ssm_parameter.db_admin.name + root_pass = aws_ssm_parameter.db_admin_pass.name + db_host = aws_ssm_parameter.db_host.name + db_name = aws_ssm_parameter.db_name.name + + ssm_throttle_limit = aws_ssm_parameter.throttle_limit.name +} + +resource "aws_cloudwatch_event_rule" "every_one_minutes" { + name = "${local.resources_name}-every-one-minutes" + description = "Fires every one minutes" + schedule_expression = "cron(* * * * ? *)" +} + +resource "aws_cloudwatch_event_target" "backfill_sqs_to_step_every_five_minutes" { + rule = aws_cloudwatch_event_rule.every_one_minutes.name + arn = module.backfill_lambdas.backfill_sqs_to_step_task_arn +} + +resource "aws_lambda_permission" "allow_cloudwatch_to_call_backfill_sqs_to_step" { + statement_id = "AllowExecutionFromCloudWatch" + action = "lambda:InvokeFunction" + function_name = module.backfill_lambdas.backfill_sqs_to_step_function_name + principal = "events.amazonaws.com" + source_arn = aws_cloudwatch_event_rule.every_one_minutes.arn +} + +resource "aws_ssm_parameter" "throttle_limit" { + name = "${local.resources_name}-throttle-limit" + type = "String" + value = var.throttle_limit + tags = local.default_tags + overwrite = true +} diff --git a/terraform-deploy/bin/config.sh b/terraform-deploy/bin/config.sh new file mode 100644 index 0000000..95d3b29 --- /dev/null +++ b/terraform-deploy/bin/config.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +set -eo pipefail + +if [ ! 
+if [ ! $# -eq 1 ]
+then
+  echo "usage: $(caller | cut -d' ' -f2) venue"
+  exit 1
+fi
+
+VENUE=$1
+source "$(dirname "${BASH_SOURCE[0]}")/../environments/$VENUE.env"
+
+export TF_IN_AUTOMATION=true # https://www.terraform.io/cli/config/environment-variables#tf_in_automation
+export TF_INPUT=false # https://www.terraform.io/cli/config/environment-variables#tf_input
+
+export TF_VAR_region="$REGION"
+export TF_VAR_stage="$VENUE"
+
+terraform init -reconfigure -backend-config="bucket=$BUCKET" -backend-config="region=$REGION"
\ No newline at end of file
diff --git a/terraform-deploy/cloudwatch_event_post_sqs.tf b/terraform-deploy/cloudwatch_event_post_sqs.tf
new file mode 100644
index 0000000..0f62cf9
--- /dev/null
+++ b/terraform-deploy/cloudwatch_event_post_sqs.tf
@@ -0,0 +1,76 @@
+locals {
+  post-sqs-name = "${local.resources_name}-post-step-sqs"
+}
+
+resource "aws_cloudwatch_event_rule" "state_machine_execution_rule" {
+  name = "${local.resources_name}-cloudwatch-rule"
+  event_pattern = jsonencode({
+    source      = ["aws.states"]
+    detail-type = ["Step Functions Execution Status Change"]
+    detail = {
+      status = [
+        "FAILED",
+        "SUCCEEDED"
+      ],
+      stateMachineArn = [aws_sfn_state_machine.forge.id, aws_sfn_state_machine.tig.id, aws_sfn_state_machine.dmrpp.id]
+    }
+  })
+}
+
+resource "aws_sqs_queue" "post-step-dead-letter-queue" {
+  name                      = "${local.resources_name}-post-step-dead-letter-queue"
+  message_retention_seconds = 1209600
+}
+
+# Create the SQS queue; its access policy is attached at the bottom of this file
+resource "aws_sqs_queue" "post-step-queue" {
+  name                       = local.post-sqs-name
+  visibility_timeout_seconds = 900
+
+  redrive_policy = jsonencode({
+    deadLetterTargetArn = aws_sqs_queue.post-step-dead-letter-queue.arn
+    maxReceiveCount     = 3
+  })
+
+  redrive_allow_policy = jsonencode({
+    redrivePermission = "byQueue",
+    sourceQueueArns   = [aws_sqs_queue.post-step-dead-letter-queue.arn]
+  })
+
+}
+
+resource "aws_lambda_event_source_mapping" "post_step_event_source_mapping" {
+  event_source_arn = aws_sqs_queue.post-step-queue.arn
+  enabled          = true
+  function_name    = module.backfill_lambdas.backfill_post_step_task_arn
+  batch_size       = 1
+}
+
+resource "aws_cloudwatch_event_target" "cloudwatch_event_target" {
+  rule = aws_cloudwatch_event_rule.state_machine_execution_rule.name
+  arn  = aws_sqs_queue.post-step-queue.arn
+}
+
+data "aws_iam_policy_document" "sqs_from_cloudwatch_event" {
+  statement {
+    sid     = "AllowQueueFromCloudwatchEvent"
+    actions = ["sqs:SendMessage"]
+    principals {
+      type        = "Service"
+      identifiers = ["events.amazonaws.com"]
+    }
+    resources = [aws_sqs_queue.post-step-queue.arn]
+
+    condition {
+      test     = "ArnEquals"
+      variable = "aws:SourceArn"
+
+      values = [aws_cloudwatch_event_rule.state_machine_execution_rule.arn]
+    }
+  }
+}
+
+resource "aws_sqs_queue_policy" "post_sqs" {
+  queue_url = aws_sqs_queue.post-step-queue.id
+  policy    = data.aws_iam_policy_document.sqs_from_cloudwatch_event.json
+}
\ No newline at end of file
diff --git a/terraform-deploy/database.tf b/terraform-deploy/database.tf
new file mode 100644
index 0000000..b4adc49
--- /dev/null
+++ b/terraform-deploy/database.tf
@@ -0,0 +1,123 @@
+resource "aws_security_group" "db" {
+  description = "controls access to the database"
+
+  vpc_id = data.aws_vpc.default.id
+  name   = "${local.resources_name}-db-sg"
+  tags   = local.default_tags
+}
+
+resource "aws_security_group_rule" "allow_self_in" {
+  type              = "ingress"
+  security_group_id = aws_security_group.db.id
+  protocol          = "tcp"
+  from_port         = 3306
+  to_port           = 3306
+  self              = true
+}
+
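+# allow_self_in (above) admits members of this security group to MySQL on 3306;
+# the rule below permits all outbound traffic from those same members.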
"allow_all_out" { + type = "egress" + security_group_id = aws_security_group.db.id + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = [ + "0.0.0.0/0", + ] +} + +resource "aws_db_subnet_group" "default" { + name = "${local.resources_name}-subnet" + subnet_ids = data.aws_subnets.private.ids + + tags = local.default_tags +} + +resource "random_password" "db_admin_pass" { + length = 16 + special = false +} +resource "random_password" "db_user_pass" { + length = 16 + special = false +} + + +## RDS Database +resource "aws_db_instance" "database" { + identifier = "${local.resources_name}-rds" + allocated_storage = var.db_storage_size + storage_type = "gp2" + engine = "mysql" + engine_version = "5.7" + instance_class = var.db_instance_class + db_name = "hitidebackfilldb" + username = "hitidebackfilldbadmin" + password = random_password.db_admin_pass.result + parameter_group_name = "default.mysql5.7" + multi_az = "true" + vpc_security_group_ids = [aws_security_group.db.id] + db_subnet_group_name = aws_db_subnet_group.default.id + skip_final_snapshot = true + tags = local.default_tags + apply_immediately = true +} + + +resource "aws_ssm_parameter" "db_admin" { + name = "${local.resources_name}-db-admin" + type = "String" + value = aws_db_instance.database.username + tags = local.default_tags + overwrite = true +} + +resource "aws_ssm_parameter" "db_admin_pass" { + name = "${local.resources_name}-db-admin-pass" + type = "SecureString" + value = aws_db_instance.database.password + tags = local.default_tags + overwrite = true +} + +resource "aws_ssm_parameter" "db_user" { + name = "${local.resources_name}-db-user" + type = "String" + value = "hitidebackfilluser" + tags = local.default_tags + overwrite = true +} + +resource "aws_ssm_parameter" "db_user_pass" { + name = "${local.resources_name}-db-user-pass" + type = "SecureString" + value = random_password.db_user_pass.result + tags = local.default_tags + overwrite = true +} + +resource "aws_ssm_parameter" "db_host" { + name = "${local.resources_name}-db-host" + type = "String" + value = aws_db_instance.database.address + tags = local.default_tags + overwrite = true +} + +resource "aws_ssm_parameter" "db_name" { + name = "${local.resources_name}-db_name" + type = "String" + value = aws_db_instance.database.db_name + tags = local.default_tags + overwrite = true +} + +resource "aws_ssm_parameter" "db_sg" { + name = "${local.resources_name}-db-sg" + type = "String" + value = aws_security_group.db.id + tags = local.default_tags + overwrite = true +} \ No newline at end of file diff --git a/terraform-deploy/dmrpp.tf b/terraform-deploy/dmrpp.tf new file mode 100644 index 0000000..473d8cf --- /dev/null +++ b/terraform-deploy/dmrpp.tf @@ -0,0 +1,23 @@ +module "dmrpp-generator" { + + // Required parameters + source = "https://github.com/ghrcdaac/dmrpp-generator/releases/download/v5.0.1/dmrpp-generator.zip" + cluster_arn = aws_ecs_cluster.default.arn + log_destination_arn = var.log_destination_arn + region = var.region + prefix = var.prefix + docker_image = "${var.dmrpp_url}:v5.0.1" + // Optional parameters + cpu = var.dmrpp_ecs_cpu // default to 512 + memory_reservation = var.dmrpp_memory_reservation // default to 512Mb + desired_count = var.dmrpp_desired_count // Default to 1 + get_dmrpp_timeout = 600 // Set dmrpp process timeout to 10 minute + + // Optional Lambda Specific Configuration + cumulus_lambda_role_arn = aws_iam_role.iam_execution.arn // If provided the lambda will be provisioned + timeout = 900 + memory_size = 256 + ephemeral_storage = 
+
+ }
+ 
\ No newline at end of file
diff --git a/terraform-deploy/dmrpp_workflow.tf b/terraform-deploy/dmrpp_workflow.tf
new file mode 100644
index 0000000..be64ddb
--- /dev/null
+++ b/terraform-deploy/dmrpp_workflow.tf
@@ -0,0 +1,371 @@
+resource "aws_sfn_state_machine" "dmrpp" {
+  name     = "${local.resources_name}-dmrpp"
+  role_arn = aws_iam_role.step.arn
+
+  definition = <> /etc/sysconfig/docker-storage
+
+    sed -i '/^\s*OPTIONS=/d' /etc/sysconfig/docker
+    echo 'OPTIONS="--default-ulimit nofile=1024:4096 --data-root=/docker-data"' >> /etc/sysconfig/docker
+
+%{ if include_docker_cleanup_cronjob == true ~}
+    echo '* * * * * sudo sh -c "docker ps -q | xargs docker inspect --format='\{{.State.Pid}}' | xargs -IZ fstrim /proc/Z/root/"' | crontab -
+%{ endif ~}
+
+    --==BOUNDARY==
+    Content-Type: text/x-shellscript; charset="us-ascii"
+
+%{ if efs_dns_name != null && efs_mount_point != null ~}
+    AZ=$(curl http://169.254.169.254/latest/meta-data/placement/availability-zone)
+
+    if ! rpm -q nfs-utils >/dev/null 2>&1; then
+      yum install -y nfs-utils
+    fi
+
+    mkdir -p ${efs_mount_point}
+    mount -t nfs4 -o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2 ${efs_dns_name}:/ ${efs_mount_point}
+    chmod 777 ${efs_mount_point}
+
+    service docker restart
+
+%{ endif ~}
+    cat <<'EOF' >> /etc/ecs/ecs.config
+    ECS_CLUSTER=${cluster_name}
+    ECS_ENGINE_TASK_CLEANUP_WAIT_DURATION=1m
+    ECS_CONTAINER_STOP_TIMEOUT=${container_stop_timeout}
+    EOF
+
+%{ if docker_hub_config != null ~}
+    echo ECS_ENGINE_AUTH_TYPE=docker >> /etc/ecs/ecs.config
+    echo 'ECS_ENGINE_AUTH_DATA={"https://index.docker.io/v1/":{"username":"${docker_hub_config.username}","password": "${docker_hub_config.password}","email":"${docker_hub_config.email}"}}' >> /etc/ecs/ecs.config
+
+%{ endif ~}
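+    # Bootstrap the AWS CLI (v1 bundled installer) if it is not already
+    # present; it is used below to fetch task-reaper.sh from S3.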
+    if ! which aws >/dev/null 2>&1; then
+      yum install -y jq unzip
+      curl "https://s3.amazonaws.com/aws-cli/awscli-bundle.zip" -o "awscli-bundle.zip"
+      unzip awscli-bundle.zip
+      ./awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws
+      rm -rf ./awscli-bundle awscli-bundle.zip
+    fi
+
+    aws s3 cp s3://${task_reaper_object.bucket}/${task_reaper_object.key} /usr/local/bin/task-reaper.sh
+    chmod +x /usr/local/bin/task-reaper.sh
+    echo "$(echo '0,30 * * * * /usr/sbin/logrotate -v /etc/logrotate.conf' ; crontab -l)" | crontab -
+    sed -i 's/size.*/size 100M\n dateformat -%Y%m%d%s\n copytruncate/' /etc/logrotate.d/awslogs
+    sed -i 's/rotate 4/rotate 2/' /etc/logrotate.d/awslogs
+    cat <<'EOF' >> /etc/cron.d/task-reaper
+    PATH=/bin:/usr/local/bin
+    AWS_DEFAULT_REGION=${region}
+    LIFECYCLE_HOOK_NAME=${cluster_name}-ecs-termination-hook
+    * * * * * root /usr/local/bin/task-reaper.sh >> /var/log/task-reaper.log 2>&1
+    EOF
+
+    --==BOUNDARY==--
+
+  AutoScalingGroup:
+    Type: AWS::AutoScaling::AutoScalingGroup
+    UpdatePolicy:
+      AutoScalingRollingUpdate:
+        MinInstancesInService: ${min_size}
+    Properties:
+      VPCZoneIdentifier:
+%{ for s in subnet_ids ~}
+        - ${s}
+%{ endfor ~}
+      LaunchConfigurationName: !Ref LaunchConfiguration
+      MinSize: ${min_size}
+      DesiredCapacity: ${desired_capacity}
+      MaxSize: ${max_size}
+      Tags:
+        - Key: Name
+          Value: ${cluster_name}
+          PropagateAtLaunch: true
+Outputs:
+  AutoscalingGroupName:
+    Value: !Ref AutoScalingGroup
\ No newline at end of file
diff --git a/terraform-deploy/environments/ops.env b/terraform-deploy/environments/ops.env
new file mode 100644
index 0000000..c36075e
--- /dev/null
+++ b/terraform-deploy/environments/ops.env
@@ -0,0 +1,2 @@
+export REGION=us-west-2
+export BUCKET=podaac-services-ops-terraform
\ No newline at end of file
diff --git a/terraform-deploy/environments/sit.env b/terraform-deploy/environments/sit.env
new file mode 100644
index 0000000..6aae1b2
--- /dev/null
+++ b/terraform-deploy/environments/sit.env
@@ -0,0 +1,2 @@
+export REGION=us-west-2
+export BUCKET=podaac-services-sit-terraform
\ No newline at end of file
diff --git a/terraform-deploy/environments/uat.env b/terraform-deploy/environments/uat.env
new file mode 100644
index 0000000..5ded9f8
--- /dev/null
+++ b/terraform-deploy/environments/uat.env
@@ -0,0 +1,2 @@
+export REGION=us-west-2
+export BUCKET=podaac-services-uat-terraform
\ No newline at end of file
diff --git a/terraform-deploy/fargate.tf b/terraform-deploy/fargate.tf
new file mode 100644
index 0000000..ce074ff
--- /dev/null
+++ b/terraform-deploy/fargate.tf
@@ -0,0 +1,131 @@
+resource "aws_ecs_cluster" "main" {
+  name = "${var.prefix}-fargate-ecs-cluster"
+}
+
+data "aws_iam_policy_document" "fargate_assume_role_policy" {
+  statement {
+    effect  = "Allow"
+    actions = ["sts:AssumeRole"]
+
+    principals {
+      type        = "Service"
+      identifiers = ["ecs-tasks.amazonaws.com"]
+    }
+  }
+}
+
+data "aws_iam_policy_document" "fargate_policy" {
+
+  statement {
+    actions = [
+      "autoscaling:CompleteLifecycleAction",
+      "autoscaling:DescribeAutoScalingInstances",
+      "autoscaling:DescribeLifecycleHooks",
+      "autoscaling:RecordLifecycleActionHeartbeat",
+      "cloudformation:DescribeStacks",
+      "cloudwatch:GetMetricStatistics",
+      "dynamodb:ListTables",
+      "ec2:CreateNetworkInterface",
+      "ec2:DeleteNetworkInterface",
+      "ec2:DescribeInstances",
+      "ec2:DescribeNetworkInterfaces",
+      "ecr:BatchCheckLayerAvailability",
+      "ecr:BatchGetImage",
+      "ecr:GetAuthorizationToken",
+      "ecr:GetDownloadUrlForLayer",
+      "ecs:DeregisterContainerInstance",
+      "ecs:DescribeClusters",
"ecs:DescribeContainerInstances", + "ecs:DescribeServices", + "ecs:DiscoverPollEndpoint", + "ecs:ListContainerInstances", + "ecs:ListServices", + "ecs:ListTaskDefinitions", + "ecs:ListTasks", + "ecs:Poll", + "ecs:RegisterContainerInstance", + "ecs:RunTask", + "ecs:StartTelemetrySession", + "ecs:Submit*", + "ecs:UpdateContainerInstancesState", + "events:DeleteRule", + "events:DescribeRule", + "events:DisableRule", + "events:EnableRule", + "events:ListRules", + "events:PutRule", + "kinesis:DescribeStream", + "kinesis:GetRecords", + "kinesis:GetShardIterator", + "kinesis:ListStreams", + "kinesis:PutRecord", + "lambda:GetFunction", + "lambda:GetLayerVersion", + "lambda:invokeFunction", + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:DescribeLogStreams", + "logs:PutLogEvents", + "s3:ListAllMyBuckets", + "sns:List*", + "sns:publish", + "ssm:GetParameter", + "states:DescribeActivity", + "states:DescribeExecution", + "states:GetActivityTask", + "states:GetExecutionHistory", + "states:ListStateMachines", + "states:SendTaskFailure", + "states:SendTaskSuccess", + "states:StartExecution", + "states:StopExecution" + ] + resources = ["*"] + } + + statement { + actions = [ + "s3:AbortMultipartUpload", + "s3:DeleteObject", + "s3:DeleteObjectVersion", + "s3:GetAccelerateConfiguration", + "s3:GetBucket*", + "s3:GetLifecycleConfiguration", + "s3:GetObject*", + "s3:GetReplicationConfiguration", + "s3:ListBucket*", + "s3:ListMultipartUploadParts", + "s3:PutAccelerateConfiguration", + "s3:PutBucket*", + "s3:PutLifecycleConfiguration", + "s3:PutObject*", + "s3:PutReplicationConfiguration" + ] + resources = ["arn:aws:s3:::*"] + } + + statement { + actions = [ + "sqs:SendMessage", + "sqs:ReceiveMessage", + "sqs:ChangeMessageVisibility", + "sqs:DeleteMessage", + "sqs:GetQueueUrl", + "sqs:GetQueueAttributes", + ] + resources = ["arn:aws:sqs:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:*"] + } + +} + +resource "aws_iam_role" "fargate_execution" { + name = "${local.resources_name}-fargate_execution-role" + assume_role_policy = data.aws_iam_policy_document.fargate_assume_role_policy.json + permissions_boundary = "arn:aws:iam::${data.aws_caller_identity.current.account_id}:policy/NGAPShRoleBoundary" +} + +resource "aws_iam_role_policy" "fargate_policy_attachment" { + name = "${var.prefix}_fargate_ecs_cluster_instance_policy" + role = aws_iam_role.fargate_execution.id + policy = data.aws_iam_policy_document.fargate_policy.json +} \ No newline at end of file diff --git a/terraform-deploy/forge.tf b/terraform-deploy/forge.tf new file mode 100644 index 0000000..9b77c4e --- /dev/null +++ b/terraform-deploy/forge.tf @@ -0,0 +1,29 @@ +module "forge_module" { + + source = "https://github.com/podaac/forge/releases/download/0.11.0-rc.2/forge-0.11.0-rc.2.zip" + prefix = local.resources_name + region = var.region + cmr_environment = var.cmr_environment + config_url = "https://hitide.podaac.earthdatacloud.nasa.gov/dataset-configs" + footprint_output_bucket = "${local.resources_name}-internal" + footprint_output_dir = "dataset-metadata" + lambda_role = aws_iam_role.iam_execution.arn + layers = [aws_lambda_layer_version.cumulus_message_adapter.arn] + security_group_ids = [var.aws_security_group_ids] + subnet_ids = data.aws_subnets.private.ids + memory_size = 512 + timeout = 900 + profile = var.profile + + # ECS Variables + cluster_arn = aws_ecs_cluster.main.arn + + # Fargate Variables + forge_fargate = true + fargate_memory = 512 + fargate_cpu = 256 + fargate_iam_role = 
aws_iam_role.fargate_execution.arn + ecs_cluster_name = aws_ecs_cluster.main.name + lambda_container_image_uri = "ghcr.io/podaac/forge:0.11.0-rc.2" + fargate_max_capacity = 100 +} diff --git a/terraform-deploy/forge_py.tf b/terraform-deploy/forge_py.tf new file mode 100644 index 0000000..47f8f86 --- /dev/null +++ b/terraform-deploy/forge_py.tf @@ -0,0 +1,15 @@ +module "forge_py_module" { + source = "https://github.com/podaac/forge-py/releases/download/0.1.0/forge-py-terraform-0.1.0.zip" + lambda_container_image_uri = "ghcr.io/podaac/forge-py:0.1.0" + prefix = local.resources_name + region = var.region + cmr_environment = var.cmr_environment + config_url = "https://hitide.podaac.earthdatacloud.nasa.gov/dataset-configs" + footprint_output_bucket = "${local.resources_name}-internal" + footprint_output_dir = "dataset-metadata" + lambda_role = aws_iam_role.iam_execution.arn + security_group_ids = [var.aws_security_group_ids] + subnet_ids = data.aws_subnets.private.ids + memory_size = 1024 + timeout = 900 +} diff --git a/terraform-deploy/forge_step.tf b/terraform-deploy/forge_step.tf new file mode 100644 index 0000000..77f2cee --- /dev/null +++ b/terraform-deploy/forge_step.tf @@ -0,0 +1,385 @@ +resource "aws_sfn_state_machine" "forge" { + name = "${local.resources_name}-forge" + role_arn = aws_iam_role.step.arn + + definition = < + + + + + + + + + + + + + + + + Physical temperature of reflector from L1B files. Pol basis V,H + + + Kelvin + + + 0. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + TA of reflected galaxy intrusion. Pol basis I,Q,S3 + + + Kelvin + + + 0. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + estimated sea ice fraction weighted by antenna gain + + + sea_ice_area_fraction + + + 1 + + + 0. + + + 1. + + + -9999. + + + 1. + + + 0. + + + referenceInformation + + + 360 780 + + + + + + + + + + + + Sun zenith angle in S/C coordinate system + + + degree + + + 0. + + + 180. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + Latitudinal (polar) angle of specular reflection ray in ECI2000 + + + degree + + + -90. + + + 90. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + + Brightness temperature at rough ocean surface BEFORE applying roughness correction. Pol basis V,H,S3,S4 + + + brightness_temperature + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + TA land correction applied at TOA. Pol basis V,H + + + Kelvin + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + Ancillary sea surface wind speed from CCMP NRT that is used in surface roughness correction + + + wind_speed + + + m s-1 + + + 0. + + + 100. + + + -9999. + + + 1. + + + 0. + + + auxiliaryInformation + + + 360 780 + + + + + + + + + + + + total formal uncertainty estimate of SMAP sea surface salinity smoothed to approx 70km resolution + + + 1e-3 + + + 0. + + + 45. + + + -9999. + + + 1. + + + 0. + + + qualityInformation + + + 360 780 1 + + + + + + + + + + + + + + + + SMAP sea surface salinity smoothed to approx 70km resolution + + + sea_surface_salinity + + + 1e-3 + + + 0. + + + 45. + + + -9999. + + + 1. + + + 0. 
+ + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + + Brightness temperature at flat ocean surface after applying roughness correction and sea-ice correction. Pol basis V,H,S3,S4 + + + brightness_temperature + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Brightness temperature at top of atmosphere AFTER applying land correction. Pol basis V,H,S3,S4 + + + toa_brightness_temperature + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + TA of reflected sun intrusion. Pol basis I,Q,S3 + + + Kelvin + + + 0. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Scan angle: +90 is left of forward. +270 is right of forward. + + + platform_azimuth_angle + + + degree + + + 0. + + + 360. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + Expected total polarization basis rotation angle: Faraday rotation (from external TEC) + geometric + + + degree + + + -90. + + + 90. + + + -9999. + + + 1. + + + 0. + + + referenceInformation + + + 360 780 1 + + + + + + + + + + + + + + + + + TA of direct galaxy intrusion. Pol basis I,Q,S3 + + + Kelvin + + + 0. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + unfiltered antenna temperature. Pol basis V,H,S3,S4 + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Empirical correction to physical temperature of reflector. Pol basis V,H + + + Kelvin + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + Time of observation + + + time + + + T + + + seconds since 2000-1-1 0:0:0 0 + + + 1. + + + 0. + + + 0. + + + standard + + + coordinate + + + 360 780 1 + + + + + + + + + + + + + + + Total atmospheric transmittance + + + 1 + + + 0. + + + 1. + + + -9999. + + + 1. + + + 0. + + + auxiliaryInformation + + + 360 780 + + + + + + + + + + + + TB sea-ice correction applied at TB SUR 0. Pol basis V,H + + + Kelvin + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + Earth incidence angle + + + angle_of_incidence + + + degree + + + 0. + + + 90. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + Ancillary wind direction relative to North (from NCEP, meteorological convention) that is used in surface roughness correction + + + wind_from_direction + + + degree + + + 0. + + + 360. + + + -9999. + + + 1. + + + 0. + + + auxiliaryInformation + + + 360 780 + + + + + + + + + + + + + Antenna temperature after correcting for emissive reflector and TND adjustment. Pol basis V,H,S3,S4 + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ancillary sea surface temperature (from CMC) + + + sea_surface_temperature + + + Kelvin + + + 0. + + + 313.1499939 + + + -9999. + + + 1. + + + 0. 
+ + + auxiliaryInformation + + + 360 780 + + + + + + + + + + + + Azimuthal look angle relative to North + + + sensor_azimuth_angle + + + degree + + + 0. + + + 360. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + Sun azimuth angle in S/C coordinate system + + + degree + + + 0. + + + 360. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + ancillary sea-ice detection indicator + + + 1 + + + 0 + + + qualityInformation + + + component 1 of anc_sea_ice_flag: climatological sea-ice flag. component 2 of anc_sea_ice_flag: sea-ice flag from AMSR2 RSS AS-ECV V8.2 3-day map. component 3 of anc_sea_ice_flag: sea-ice flag from Meissner and Manaster. + + + 720 1560 2 + + + + + + + + + + + formal uncertainty components of SMAP sea surface salinity smoothed to approx 70km resolution + + + 1e-3 + + + 0. + + + 45. + + + -9999. + + + 1. + + + 0. + + + qualityInformation + + + 1: ancillary wind speed random. 2: NEDT v-pol. 3: NEDT h-pol. 4: ancillary SST. 5: ancillary wind direction. 6: reflected galaxy. 7: land contamination. 8: sea ice contamination. 9: ancillary wind speed systematic. + + + 3 240 520 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + land fraction weighted by antenna gain + + + land_area_fraction + + + 1 + + + 0. + + + 1. + + + -9999. + + + 1. + + + 0. + + + auxiliaryInformation + + + 360 780 1 + + + + + + + + + + + + + + + + + Brightness temperature at flat ocean surface AFTER applying roughness correction. Pol basis V,H,S3,S4 + + + brightness_temperature + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Atmospheric downwelling brightness temperature + + + Kelvin + + + 0. + + + -9999. + + + 1. + + + 0. + + + auxiliaryInformation + + + 360 780 + + + + + + + + + + + + + formal uncertainty components of SMAP sea surface salinity at original 40km resolution + + + 1e-3 + + + 0. + + + 45. + + + -9999. + + + 1. + + + 0. + + + qualityInformation + + + 1: ancillary wind speed random. 2: NEDT v-pol. 3: NEDT h-pol. 4: ancillary SST. 5: ancillary wind direction. 6: reflected galaxy. 7: land contamination. 8: sea ice contamination. 9: ancillary wind speed systematic. + + + 3 240 520 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + RFI filtered antenna temperature. Pol basis V,H,S3,S4 + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Moon glint angle + + + degree + + + -180. + + + 180. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + sea-ice contamination zones + + + 1 + + + 0 + + + 6 + + + 7 + + + qualityInformation + + + 0: open ocean scene. no sea-ice contamination. 1: likely sea-ice contamination in SMAP antenna sidelobes. SSS retrieved. 2: likely sea-ice contamination in SMAP antenna sidelobes. SSS retrieved. 3: likely sea-ice contamination in SMAP antenna mainlobe. SSS retrieved. 4: likely sea-ice contamination in SMAP antenna mainlobe. SSS retrieved. 5: likely sea-ice contamination in SMAP antenna mainlobe. no SSS retrieved. 6: AMSR2 50-km footprint contains land. sea-ice check not reliable. 
no SSS retrieved if AMSR-2 AS-ECV V8.2 sea-ice flag set. 7: no or invalid AMSR2 observation. sea-ice check not possible. no SSS retrieved if climatological sea-ice flag set. + + + 720 1560 + + + + + + + + + Orbital position angle of S/C. 0 is South. 90 is equator ascending. 180 is North. 270 is equator descending. + + + degree + + + 0. + + + 360. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + Longitudinal (azimuthal) angle of specular reflection ray in ECI2000 + + + degree + + + 0. + + + 360. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + + Expected (RTM) at flat ocean surface. SIC added. This is to be compared with tb_sur0, before SIC. Pol basis V,H,S3,S4 + + + brightness_temperature + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + referenceInformation + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Brightness temperature at top of ionosphere. Pol basis V,H,S3,S4 + + + brightness_temperature + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Vertically integrated electron content between surface and S/C + + + 1e16 m-2 + + + 0. + + + -9999. + + + 1. + + + 0. + + + referenceInformation + + + 360 780 1 + + + + + + + + + + + + + + + IMERG rain rate. Resampled to SMAP resolution. + + + rainfall_rate + + + mm h-1 + + + 0. + + + 25. + + + -9999. + + + 1. + + + 0. + + + referenceInformation + + + 360 780 + + + + + + + + + + + + Polarization basis rotation angle (geometric part) + + + degree + + + -90. + + + 90. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + SMAP sea surface salinity at original 40km resolution + + + sea_surface_salinity + + + 1e-3 + + + 0. + + + 45. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + + Antenna temperature after correcting for cold space, galaxy, sun, moon. Pol basis V,H,S3,S4 + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Expected (RTM) antenna temperature before any losses. This is to be compared with ta_ant_calibrated. Pol basis V,H,S3,S4 + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + referenceInformation + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + total formal uncertainty estimate of SMAP sea surface salinity at original 40km resolution + + + 1e-3 + + + 0. + + + 45. + + + -9999. + + + 1. + + + 0. + + + qualityInformation + + + 360 780 1 + + + + + + + + + + + + + + + + Longitude of OI Earth grid cell + + + longitude + + + X + + + degrees_east + + + 0. + + + 360. + + + -9999. + + + 1. + + + 0. + + + coordinate + + + 360 780 1 + + + + + + + + + + + + + + + Reference sea surface salinity from HYCOM + + + sea_surface_salinity + + + 1e-3 + + + 0. + + + 45. + + + -9999. + + + 1. + + + 0. + + + referenceInformation + + + 360 780 + + + + + + + + + + + + 32-bit quality control flag + + + 1 + + + 1 + + + qualityInformation + + + bit 0 set: no radiometer observation in cell. SSS not retrieved. bit 1 set: problenm with OI. SSS not retrieved. bit 2 set: strong land contamination. SSS not retrieved. 
bit 3 set: strong sea ice contamination. SSS not retrieved. bit 4 set: MLE in SSS retrieval algo has not converged. SSS not retrieved. bit 5 set: sunglint. SSS retrieved. very strong degradation. bit 6 set: moonglint. SSS retrieved. moderate - strong degradation. bit 7 set: high reflected galaxy. SSS retrieved. moderate - strong degradation. bit 8 set: moderate land contamination. SSS retrieved. strong degradation. bit 9 set: moderate sea ice contamination. SSS retrieved. strong degradation. bit 10 set: high residual of MLE in SSS retrieval algo. SSS retrieved. strong degradation. bit 11 set: low SST. SSS retrieved. moderate - strong degradation. bit 12 set: high wind. SSS retrieved. moderate degradation. bit 13 set: light land contamination. SSS retrieved. light degradation. not used in ocean target cal. bit 14 set: light sea ice contamination. SSS retrieved. light - moderate degradation. not used in ocean target cal. bit 15 set: rain flag. SSS retrieved. possibly light degradation. not used in ocean target cal. bit 16 set: climatological sea-ice flag set. no AMSR2 data available for sea-ice detection or correction. no SSS retrieved. + + + 360 780 1 + + + + + + + + + + + + + + + + Sun glint angle. Negative value means that sun ray pierces the Earth. + + + degree + + + -180. + + + 180. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 + + + + + + + + + + + + + + + + land fraction within footprint + + + land_area_fraction + + + 1 + + + 0. + + + 1. + + + -9999. + + + 1. + + + 0. + + + auxiliaryInformation + + + 360 780 1 + + + + + + + + + + + + + + + + Latitude of OI Earth grid cell + + + latitude + + + Y + + + degrees_north + + + -90. + + + 90. + + + -9999. + + + 1. + + + 0. + + + coordinate + + + 360 780 1 + + + + + + + + + + + + + + + + + TA of direct sun intrusion. Pol basis I,Q,S3 + + + Kelvin + + + 0. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Atmospheric upwelling brightness temperature + + + Kelvin + + + 0. + + + -9999. + + + 1. + + + 0. + + + auxiliaryInformation + + + 360 780 + + + + + + + + + + + + + Brightness temperature at top of atmosphere BEFORE applying land correction. Pol basis V,H,S3,S4 + + + toa_brightness_temperature + + + Kelvin + + + 0. + + + 330. + + + -9999. + + + 1. + + + 0. + + + physicalMeasurement + + + 360 780 1 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + sqrt(chi2) of MLE in SSS retrieval + + + Kelvin + + + 0. + + + -9999. + + + 1. + + + 0. + + + qualityInformation + + + 360 780 1 + + + + + + + + + + + + + + + Ancillary solar flux (from NOAA SWPC) + + + 1e-22 W m-2 Hz-1 + + + 0. + + + -9999. + + + 1. + + + 0. + + + auxiliaryInformation + + + 360 780 + + + + + + + + CF-1.7, ACDD-1.3 + + + SMAP ocean surface salinity + + + V5.0 Validated Release + + + The dataset contains the Level 2 output of the NASA/RSS Version 5.0 SMAP Salinity Retrieval Algorithm. It includes all necessary ancillary data and the results of all intermediate steps. The data are resampled on a fixed regular 0.25 deg Earth grid. For details see the Release Notes at https://www.remss.com/missions/smap/salinity/. + + + Funded under Subcontract No.1664013 between JPL and RSS: Production System for NASA Ocean Salinity Science Team (OSST). + + + L2C + + + Level 2: swath observations. optimum interpolated (OI) onto 0.25 deg fixed Earth grid + + + Spatial resolution: approx 70km + + + created by T. 
Meissner + + + 2022-03-22T14:18:48-0700 + + + 2022-03-22T14:18:48-0700 + + + 2022-03-22T14:18:48-0700 + + + 2022-03-22T14:18:48-0700 + + + Remote Sensing Systems, Santa Rosa, CA, USA + + + RSS SMAP-SSS v5.0 algorithm + + + SMAP + + + SMAP radiometer + + + Production System for NASA Ocean Salinity Science Team (OSST) + + + SURFACE SALINITY, SALINITY, SMAP, NASA, RSS + + + NASA Global Change Master Directory (GCMD) Science Keywords + + + CF Standard Name Table v78 + + + None + + + Thomas Meissner, Remote Sensing Systems + + + meissner@remss.com + + + http://www.remss.com/missions/smap + + + Thomas Meissner, Frank Wentz, Andrew Manaster, Richard Lindsley, Marty Brewer, Michael Densberger, Remote Sensing Systems + + + meissner@remss.com + + + http://www.remss.com/missions/smap + + + 10.5067/SMP50-2SOCS + + + gov.nasa.earthdata + + + T. Meissner, F. Wentz, A. Manaster, R. Lindsley, M. Brewer, M. Densberger + + + 2022 + + + Remote Sensing Systems SMAP L2C Sea Surface Salinity + + + V5.0 Validated Release + + + Remote Sensing Systems, Santa Rosa, CA, USA + + + Available online at www.remss.com/missions/smap + + + 4 + + + Major changes in V5.0: 1. sea-ice flag: based on AMSR-2 surface emissivties and discriminant analysis. 2. sea-ice correction included. 3. formal uncertainty estimates added. + + + 1. V5.0 Release Notes at https://www.remss.com/missions/smap/salinity/ 2. Meissner, T.; Wentz, F.J.; Le Vine, D.M. The Salinity Retrieval Algorithms for the NASA Aquarius Version 5 and SMAP Version 3 Releases. Remote Sens. 2018, 10, 1121. https://doi.org/10.3390/rs10071121 3. Meissner, T.; Manaster, A. SMAP Salinity Retrievals near the Sea-Ice Edge Using Multi-Channel AMSR2 Brightness Temperatures. Remote Sens. 2021, 13, 5120. https://doi.org/10.3390/rs13245120 + + + 2015-04-01T00:43:12Z + + + 2015-04-01T02:24:53Z + + + 870 + + + PT6104S + + + PT6104S + + + grid + + + 2D + + + -90. + + + 90. + + + 0.25 + + + degrees_north + + + 0. + + + 360. + + + 0.25 + + + degrees_east + + + EPSG:5831 + + + 0 + + + 0 + + + 1560 (360 deg longitude + 30 deg to accommodate whole swath). + + + 720 (180 deg latitude). + + + provided as variables cellon and cellat. + + + Piepmeier J. et al., 2020. SMAP L1B SMAP L1B Radiometer Half-Orbit Time-Ordered Brightness Temperatures, Version 5. Boulder, Colorado USA. NASA National Snow and Ice Data Center Distributed Active Archive Center. https://doi.org/10.5067/ZHHBN1KQLI20. + + + Canada Meteorological Center. 2016.GHRSST Level 4 CMC0.1deg Global Foundation Sea Surface Temperature Analysis (GDS version 2). Ver.3.3.doi: 10.5067/GHCMC-4FM03 http://dx.doi.org/10.5067/GHCMC-4FM03. + + + Huffman, G. et al., 2019. NASA Global Precipitation Measurement (GPM) Integrated Multi-satellitE Retrievals for GPM (IMERG) Version 6, LATE RUN, 30-minutes, NASA, http://dx.doi.org/10.5067/GPM/IMERG/3B-HH/06. + + + Mears, C. et al., 2018.Remote Sensing Systems CCMP NRT V2.0 wind speed and direction. Remote Sensing Systems, Santa Rosa, CA. + + + NCEP GFS 0.25-deg 6-hour. UGRD, VGRD. Available from http://nomads.ncep.noaa.gov/. + + + NCEP GDAS 1-deg 6-hour. HGT, PRS, TMP, TMP, RH, CLWMR. Available from http://nomads.ncep.noaa.gov/. + + + Meissner, T. and A. Manaster, 2021. SMAP Salinity Retrievals near the Sea-Ice Edge Using Multi-Channel AMSR2 Brightness Temperatures. Remote Sens. 2021, 13, 5120. https://doi.org/10.3390/rs13245120. + + + noon flux values from US Air Force Radio Solar Telescope sites 1415 MHz values. Available from NOAA Space Weather Prediction Center, www.swpc.noaa.gov. 
+ + + 1 km land/water mask from OCEAN DISCIPLINE PROCESSING SYSTEM (ODPS) based on World Vector Shoreline (WVS)database and World Data Bank. courtesy of Fred Patt, Goddard Space Flight Center, frederick.s.patt@nasa.gov. + + + Hybrid Coordinate Ocean Model, GLBa0.08/expt_90.9, Top layer salinity. Available at www.hycom.org. + + + University of Bern Astronomical Institute (AIUB) Center for Orbit Determination in Europe (CODE) TEC Forecast. Available at http://www.aiub.unibe.ch/download/CODE. + + + 481164192.96085173 + + + 2015 + + + 4 + + + 1 + + + 91 + + + 2592.9608516693115 + + + 481170293.79190642 + + + 2015 + + + 4 + + + 1 + + + 91 + + + 8693.7919063568115 + + + 0.01011999976 + + + 0.01011999976 + + + 1.1046 + + + -0.0001 + + + 0.0035999999999999999 + + + -0.00059999999999999995 + + + 0. + + + 1.1349 + + + 0.0066 + + + -0.0001 + + + 0.00089999999999999998 + + + 0.0041999999999999997 + + + 1.1335999999999999 + + + -0.055300000000000002 + + + 0.00029999999999999997 + + + 0.0014 + + + 0.0117 + + + 1.1296999999999999 + + + 109.3097382 + + + 76.1269989 + + + -1.029843211 + + + -1.033103228 + + + 0.4300000072 + + + -0.1700000018 + + + + 3.20.9-91 + + + 3.20.9-91 + + + libdap-3.20.8-41 + + + +# TheBESKeys::get_as_config() +AllowedHosts=^https?:\/\/ +BES.Catalog.catalog.Exclude=^\..*; +BES.Catalog.catalog.FollowSymLinks=Yes +BES.Catalog.catalog.Include=; +BES.Catalog.catalog.RootDirectory=/tmp/tmp81ogoe2d/ +BES.Catalog.catalog.TypeMatch=dmrpp:.*\.(dmrpp)$; +BES.Catalog.catalog.TypeMatch+=h5:.*(\.bz2|\.gz|\.Z)?$; +BES.Container.Persistence=strict +BES.Data.RootDirectory=/dev/null +BES.DefaultResponseMethod=POST +BES.FollowSymLinks=Yes +BES.Group=group_name +BES.Info.Buffered=no +BES.Info.Type=xml +BES.LogName=./bes.log +BES.LogVerbose=no +BES.Memory.GlobalArea.ControlHeap=no +BES.Memory.GlobalArea.EmergencyPoolSize=1 +BES.Memory.GlobalArea.MaximumHeapSize=20 +BES.Memory.GlobalArea.Verbose=no +BES.ProcessManagerMethod=multiple +BES.ServerAdministrator=admin.email.address@your.domain.name +BES.Uncompress.NumTries=10 +BES.Uncompress.Retry=2000 +BES.UncompressCache.dir=/tmp/hyrax_ux +BES.UncompressCache.prefix=ux_ +BES.UncompressCache.size=500 +BES.User=user_name +BES.module.cmd=/usr/lib64/bes/libdap_xml_module.so +BES.module.dap=/usr/lib64/bes/libdap_module.so +BES.module.dmrpp=/usr/lib64/bes/libdmrpp_module.so +BES.module.fonc=/usr/lib64/bes/libfonc_module.so +BES.module.h5=/usr/lib64/bes/libhdf5_module.so +BES.module.nc=/usr/lib64/bes/libnc_module.so +BES.modules=dap,cmd,h5,dmrpp,nc,fonc +FONc.ClassicModel=false +FONc.NoGlobalAttrs=true +H5.Cache.latlon.path=/tmp/latlon +H5.Cache.latlon.prefix=l +H5.Cache.latlon.size=20000 +H5.CheckIgnoreObj=false +H5.DefaultHandleDimension=true +H5.DisableStructMetaAttr=true +H5.DiskCacheComp=true +H5.DiskCacheCompThreshold=2.0 +H5.DiskCacheCompVarSize=10000 +H5.DiskCacheDataPath=/tmp +H5.DiskCacheFilePrefix=c +H5.DiskCacheFloatOnlyComp=true +H5.DiskCacheSize=10000 +H5.DiskMetaDataCachePath=/tmp +H5.EnableAddPathAttrs=true +H5.EnableCF=false +H5.EnableCFDMR=true +H5.EnableCheckNameClashing=true +H5.EnableDiskDDSCache=false +H5.EnableDiskDataCache=false +H5.EnableDiskMetaDataCache=false +H5.EnableDropLongString=true +H5.EnableEOSGeoCacheFile=false +H5.EnableFillValueCheck=true +H5.KeepVarLeadingUnderscore=false +H5.LargeDataMemCacheEntries=0 +H5.MetaDataMemCacheEntries=300 +H5.SmallDataMemCacheEntries=0 + + + + build_dmrpp -c /tmp/conf_EQOA -f /tmp/tmp81ogoe2d//RSS_SMAP_SSS_L2C_r00870_20150401T004312_2015091_FNL_V05.0.nc -r /tmp/dmr_v8Zh -u 
OPeNDAP_DMRpp_DATA_ACCESS_URL -M + + + diff --git a/tests/resources/sample_granules_1.json b/tests/resources/sample_granules_1.json new file mode 100644 index 0000000..52a847a --- /dev/null +++ b/tests/resources/sample_granules_1.json @@ -0,0 +1,820 @@ +{ + "hits": 998700, + "took": 118, + "items": [ + { + "meta": { + "concept-type": "granule", + "concept-id": "G1237730654-POCLOUD", + "revision-id": 1, + "native-id": "20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0", + "provider-id": "POCLOUD", + "format": "application/vnd.nasa.cmr.umm+json", + "revision-date": "2020-09-09T06:15:46.676Z" + }, + "umm": { + "TemporalExtent": { + "RangeDateTime": { + "EndingDateTime": "2000-02-27T20:25:00.000Z", + "BeginningDateTime": "2000-02-27T20:20:01.000Z" + } + }, + "MetadataSpecification": { + "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "Name": "UMM-G", + "Version": "1.6.4" + }, + "GranuleUR": "20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0", + "ProviderDates": [ + { "Type": "Insert", "Date": "2020-09-09T06:15:30.450Z" }, + { "Type": "Update", "Date": "2020-09-09T06:15:30.450Z" } + ], + "SpatialExtent": { + "HorizontalSpatialDomain": { + "Geometry": { + "BoundingRectangles": [ + { + "WestBoundingCoordinate": 134.98, + "SouthBoundingCoordinate": 66.18, + "EastBoundingCoordinate": 180, + "NorthBoundingCoordinate": 89.988 + }, + { + "WestBoundingCoordinate": -180, + "SouthBoundingCoordinate": 66.18, + "EastBoundingCoordinate": -63.22, + "NorthBoundingCoordinate": 89.988 + } + ] + } + } + }, + "DataGranule": { + "ArchiveAndDistributionInformation": [ + { + "SizeUnit": "MB", + "Size": 20.032519340515137, + "Name": "20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc" + } + ], + "DayNightFlag": "Unspecified", + "ProductionDateTime": "2020-09-09T06:10:48.000Z" + }, + "CollectionReference": { + "Version": "2019.0", + "ShortName": "MODIS_T-JPL-L2P-v2019.0" + }, + "RelatedUrls": [ + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc.md5", + "Description": "Download 20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc.md5", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/MODIS_T-JPL-L2P-v2019.0/20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc", + "Description": "Download 20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc", + "Type": "GET DATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.cmr.json", + "Description": "Download 20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.cmr.json", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/s3credentials", + "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", + "Type": "VIEW RELATED INFORMATION" + } + ] + } + }, + { + "meta": { + "concept-type": "granule", + "concept-id": "G1237730655-POCLOUD", + "revision-id": 1, + "native-id": "20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "provider-id": "POCLOUD", + "format": "application/vnd.nasa.cmr.umm+json", + "revision-date": "2020-09-09T06:15:46.731Z" + }, + "umm": { + "TemporalExtent": { + "RangeDateTime": { + "EndingDateTime": "2000-02-27T20:25:00.000Z", + "BeginningDateTime": 
"2000-02-27T20:20:01.000Z" + } + }, + "MetadataSpecification": { + "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "Name": "UMM-G", + "Version": "1.6.4" + }, + "GranuleUR": "20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "ProviderDates": [ + { "Type": "Insert", "Date": "2020-09-09T06:15:29.892Z" }, + { "Type": "Update", "Date": "2020-09-09T06:15:29.893Z" } + ], + "SpatialExtent": { + "HorizontalSpatialDomain": { + "Geometry": { + "BoundingRectangles": [ + { + "WestBoundingCoordinate": 134.98, + "SouthBoundingCoordinate": 66.18, + "EastBoundingCoordinate": 180, + "NorthBoundingCoordinate": 89.988 + }, + { + "WestBoundingCoordinate": -180, + "SouthBoundingCoordinate": 66.18, + "EastBoundingCoordinate": -63.22, + "NorthBoundingCoordinate": 89.988 + } + ] + } + } + }, + "DataGranule": { + "ArchiveAndDistributionInformation": [ + { + "SizeUnit": "MB", + "Size": 22.86871337890625, + "Name": "20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc" + } + ], + "DayNightFlag": "Unspecified", + "ProductionDateTime": "2020-09-09T06:10:52.000Z" + }, + "CollectionReference": { + "Version": "2019.0", + "ShortName": "MODIS_T-JPL-L2P-v2019.0" + }, + "RelatedUrls": [ + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/MODIS_T-JPL-L2P-v2019.0/20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc", + "Description": "Download 20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc", + "Type": "GET DATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc.md5", + "Description": "Download 20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc.md5", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.cmr.json", + "Description": "Download 20000227202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.cmr.json", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/s3credentials", + "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", + "Type": "VIEW RELATED INFORMATION" + } + ] + } + }, + { + "meta": { + "concept-type": "granule", + "concept-id": "G1236935090-POCLOUD", + "revision-id": 1, + "native-id": "20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "provider-id": "POCLOUD", + "format": "application/vnd.nasa.cmr.umm+json", + "revision-date": "2020-08-13T19:42:47.345Z" + }, + "umm": { + "TemporalExtent": { + "RangeDateTime": { + "EndingDateTime": "2000-03-22T20:25:00.000Z", + "BeginningDateTime": "2000-03-22T20:20:01.000Z" + } + }, + "MetadataSpecification": { + "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "Name": "UMM-G", + "Version": "1.6.4" + }, + "GranuleUR": "20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "ProviderDates": [ + { "Type": "Insert", "Date": "2020-08-13T19:42:29.872Z" }, + { "Type": "Update", "Date": "2020-08-13T19:42:29.873Z" } + ], + "SpatialExtent": { + "HorizontalSpatialDomain": { + "Geometry": { + "BoundingRectangles": [ + { + "WestBoundingCoordinate": -53.517, + "SouthBoundingCoordinate": -89.996, + "EastBoundingCoordinate": 124.935, + "NorthBoundingCoordinate": -64.138 + } + ] + } + } + }, + "DataGranule": { + "ArchiveAndDistributionInformation": [ + { + "SizeUnit": "MB", 
+ "Size": 19.56984043121338, + "Name": "20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc" + } + ], + "DayNightFlag": "Unspecified", + "ProductionDateTime": "2020-08-13T19:40:53.000Z" + }, + "CollectionReference": { + "Version": "2019.0", + "ShortName": "MODIS_T-JPL-L2P-v2019.0" + }, + "RelatedUrls": [ + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc.md5", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/MODIS_T-JPL-L2P-v2019.0/20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc", + "Description": "The base directory location for the granule.", + "Type": "GET DATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.cmr.json", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/s3credentials", + "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", + "Type": "VIEW RELATED INFORMATION" + } + ] + } + }, + { + "meta": { + "concept-type": "granule", + "concept-id": "G1236935091-POCLOUD", + "revision-id": 1, + "native-id": "20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0", + "provider-id": "POCLOUD", + "format": "application/vnd.nasa.cmr.umm+json", + "revision-date": "2020-08-13T19:42:48.455Z" + }, + "umm": { + "TemporalExtent": { + "RangeDateTime": { + "EndingDateTime": "2000-03-22T20:25:00.000Z", + "BeginningDateTime": "2000-03-22T20:20:01.000Z" + } + }, + "MetadataSpecification": { + "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "Name": "UMM-G", + "Version": "1.6.4" + }, + "GranuleUR": "20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0", + "ProviderDates": [ + { "Type": "Insert", "Date": "2020-08-13T19:42:32.639Z" }, + { "Type": "Update", "Date": "2020-08-13T19:42:32.639Z" } + ], + "SpatialExtent": { + "HorizontalSpatialDomain": { + "Geometry": { + "BoundingRectangles": [ + { + "WestBoundingCoordinate": -53.517, + "SouthBoundingCoordinate": -89.996, + "EastBoundingCoordinate": 124.935, + "NorthBoundingCoordinate": -64.138 + } + ] + } + } + }, + "DataGranule": { + "ArchiveAndDistributionInformation": [ + { + "SizeUnit": "MB", + "Size": 17.96884536743164, + "Name": "20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc" + } + ], + "DayNightFlag": "Unspecified", + "ProductionDateTime": "2020-08-13T19:40:49.000Z" + }, + "CollectionReference": { + "Version": "2019.0", + "ShortName": "MODIS_T-JPL-L2P-v2019.0" + }, + "RelatedUrls": [ + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc.md5", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/MODIS_T-JPL-L2P-v2019.0/20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc", + "Description": "The base directory location for the granule.", + "Type": "GET DATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000322202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.cmr.json", + "Description": 
"File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/s3credentials", + "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", + "Type": "VIEW RELATED INFORMATION" + } + ] + } + }, + { + "meta": { + "concept-type": "granule", + "concept-id": "G1236580138-POCLOUD", + "revision-id": 1, + "native-id": "20000402202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "provider-id": "POCLOUD", + "format": "application/vnd.nasa.cmr.umm+json", + "revision-date": "2020-07-26T14:12:58.995Z" + }, + "umm": { + "TemporalExtent": { + "RangeDateTime": { + "EndingDateTime": "2000-04-02T20:25:00.000Z", + "BeginningDateTime": "2000-04-02T20:20:00.000Z" + } + }, + "MetadataSpecification": { + "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "Name": "UMM-G", + "Version": "1.6.4" + }, + "GranuleUR": "20000402202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "ProviderDates": [ + { "Type": "Insert", "Date": "2020-07-26T14:12:43.693Z" }, + { "Type": "Update", "Date": "2020-07-26T14:12:43.693Z" } + ], + "SpatialExtent": { + "HorizontalSpatialDomain": { + "Geometry": { + "BoundingRectangles": [ + { + "WestBoundingCoordinate": 27.135, + "SouthBoundingCoordinate": -42.75, + "EastBoundingCoordinate": 57.131, + "NorthBoundingCoordinate": -21.445 + } + ] + } + } + }, + "DataGranule": { + "ArchiveAndDistributionInformation": [ + { + "SizeUnit": "MB", + "Size": 25.690549850463867, + "Name": "20000402202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc" + } + ], + "DayNightFlag": "Unspecified", + "ProductionDateTime": "2020-07-26T14:10:45.000Z" + }, + "CollectionReference": { + "Version": "2019.0", + "ShortName": "MODIS_T-JPL-L2P-v2019.0" + }, + "RelatedUrls": [ + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/MODIS_T-JPL-L2P-v2019.0/20000402202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc", + "Description": "The base directory location for the granule.", + "Type": "GET DATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000402202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.cmr.json", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/s3credentials", + "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", + "Type": "VIEW RELATED INFORMATION" + } + ] + } + }, + { + "meta": { + "concept-type": "granule", + "concept-id": "G1236932436-POCLOUD", + "revision-id": 1, + "native-id": "20000409202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "provider-id": "POCLOUD", + "format": "application/vnd.nasa.cmr.umm+json", + "revision-date": "2020-08-13T14:44:46.262Z" + }, + "umm": { + "TemporalExtent": { + "RangeDateTime": { + "EndingDateTime": "2000-04-09T20:25:00.000Z", + "BeginningDateTime": "2000-04-09T20:20:01.000Z" + } + }, + "MetadataSpecification": { + "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "Name": "UMM-G", + "Version": "1.6.4" + }, + "GranuleUR": "20000409202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "ProviderDates": [ + { "Type": "Insert", "Date": "2020-08-13T14:44:30.013Z" }, + { "Type": "Update", "Date": "2020-08-13T14:44:30.013Z" } + ], + "SpatialExtent": { + "HorizontalSpatialDomain": { + "Geometry": { + "BoundingRectangles": [ + { + "WestBoundingCoordinate": 28.106, + "SouthBoundingCoordinate": -64.34, 
+ "EastBoundingCoordinate": 72.26, + "NorthBoundingCoordinate": -42.202 + } + ] + } + } + }, + "DataGranule": { + "ArchiveAndDistributionInformation": [ + { + "SizeUnit": "MB", + "Size": 25.70586585998535, + "Name": "20000409202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc" + } + ], + "DayNightFlag": "Unspecified", + "ProductionDateTime": "2020-08-13T14:40:48.000Z" + }, + "CollectionReference": { + "Version": "2019.0", + "ShortName": "MODIS_T-JPL-L2P-v2019.0" + }, + "RelatedUrls": [ + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/MODIS_T-JPL-L2P-v2019.0/20000409202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc", + "Description": "The base directory location for the granule.", + "Type": "GET DATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000409202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc.md5", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000409202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.cmr.json", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/s3credentials", + "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", + "Type": "VIEW RELATED INFORMATION" + } + ] + } + }, + { + "meta": { + "concept-type": "granule", + "concept-id": "G1236932187-POCLOUD", + "revision-id": 1, + "native-id": "20000422202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "provider-id": "POCLOUD", + "format": "application/vnd.nasa.cmr.umm+json", + "revision-date": "2020-08-13T14:13:54.736Z" + }, + "umm": { + "TemporalExtent": { + "RangeDateTime": { + "EndingDateTime": "2000-04-22T20:24:59.000Z", + "BeginningDateTime": "2000-04-22T20:20:00.000Z" + } + }, + "MetadataSpecification": { + "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "Name": "UMM-G", + "Version": "1.6.4" + }, + "GranuleUR": "20000422202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "ProviderDates": [ + { "Type": "Insert", "Date": "2020-08-13T14:13:37.958Z" }, + { "Type": "Update", "Date": "2020-08-13T14:13:37.959Z" } + ], + "SpatialExtent": { + "HorizontalSpatialDomain": { + "Geometry": { + "BoundingRectangles": [ + { + "WestBoundingCoordinate": -1.804, + "SouthBoundingCoordinate": 43.398, + "EastBoundingCoordinate": 43.049, + "NorthBoundingCoordinate": 65.596 + } + ] + } + } + }, + "DataGranule": { + "ArchiveAndDistributionInformation": [ + { + "SizeUnit": "MB", + "Size": 19.02705478668213, + "Name": "20000422202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc" + } + ], + "DayNightFlag": "Unspecified", + "ProductionDateTime": "2020-08-13T14:10:48.000Z" + }, + "CollectionReference": { + "Version": "2019.0", + "ShortName": "MODIS_T-JPL-L2P-v2019.0" + }, + "RelatedUrls": [ + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000422202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc.md5", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/MODIS_T-JPL-L2P-v2019.0/20000422202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc", + "Description": "The base directory location for the granule.", + "Type": "GET DATA" + }, + { + "URL": 
"https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000422202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.cmr.json", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/s3credentials", + "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", + "Type": "VIEW RELATED INFORMATION" + } + ] + } + }, + { + "meta": { + "concept-type": "granule", + "concept-id": "G1236185051-POCLOUD", + "revision-id": 1, + "native-id": "20000607202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "provider-id": "POCLOUD", + "format": "application/vnd.nasa.cmr.umm+json", + "revision-date": "2020-07-11T13:13:00.647Z" + }, + "umm": { + "TemporalExtent": { + "RangeDateTime": { + "EndingDateTime": "2000-06-07T20:24:59.000Z", + "BeginningDateTime": "2000-06-07T20:20:00.000Z" + } + }, + "MetadataSpecification": { + "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "Name": "UMM-G", + "Version": "1.6.4" + }, + "GranuleUR": "20000607202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "ProviderDates": [ + { "Type": "Insert", "Date": "2020-07-11T13:12:44.793Z" }, + { "Type": "Update", "Date": "2020-07-11T13:12:44.793Z" } + ], + "SpatialExtent": { + "HorizontalSpatialDomain": { + "Geometry": { + "BoundingRectangles": [ + { + "WestBoundingCoordinate": 20.652, + "SouthBoundingCoordinate": 2.801, + "EastBoundingCoordinate": 46.008, + "NorthBoundingCoordinate": 23.74 + } + ] + } + } + }, + "DataGranule": { + "ArchiveAndDistributionInformation": [ + { + "SizeUnit": "MB", + "Size": 17.70878028869629, + "Name": "20000607202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc" + } + ], + "DayNightFlag": "Unspecified", + "ProductionDateTime": "2020-07-11T13:10:45.000Z" + }, + "CollectionReference": { + "Version": "2019.0", + "ShortName": "MODIS_T-JPL-L2P-v2019.0" + }, + "RelatedUrls": [ + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/MODIS_T-JPL-L2P-v2019.0/20000607202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc", + "Description": "The base directory location for the granule.", + "Type": "GET DATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000607202005-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.cmr.json", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/s3credentials", + "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", + "Type": "VIEW RELATED INFORMATION" + } + ] + } + }, + { + "meta": { + "concept-type": "granule", + "concept-id": "G1237974526-POCLOUD", + "revision-id": 1, + "native-id": "20000709202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "provider-id": "POCLOUD", + "format": "application/vnd.nasa.cmr.umm+json", + "revision-date": "2020-09-11T20:51:57.879Z" + }, + "umm": { + "TemporalExtent": { + "RangeDateTime": { + "EndingDateTime": "2000-07-09T20:24:59.000Z", + "BeginningDateTime": "2000-07-09T20:20:01.000Z" + } + }, + "MetadataSpecification": { + "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "Name": "UMM-G", + "Version": "1.6.4" + }, + "GranuleUR": "20000709202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "ProviderDates": [ + { "Type": "Insert", "Date": "2020-09-11T20:51:34.847Z" }, + { "Type": "Update", "Date": 
"2020-09-11T20:51:34.848Z" } + ], + "SpatialExtent": { + "HorizontalSpatialDomain": { + "Geometry": { + "BoundingRectangles": [ + { + "WestBoundingCoordinate": 20.415, + "SouthBoundingCoordinate": 3.271, + "EastBoundingCoordinate": 45.888, + "NorthBoundingCoordinate": 24.211 + } + ] + } + } + }, + "DataGranule": { + "ArchiveAndDistributionInformation": [ + { + "SizeUnit": "MB", + "Size": 17.410160064697266, + "Name": "20000709202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc" + } + ], + "DayNightFlag": "Unspecified", + "ProductionDateTime": "2020-09-11T01:40:48.000Z" + }, + "CollectionReference": { + "Version": "2019.0", + "ShortName": "MODIS_T-JPL-L2P-v2019.0" + }, + "RelatedUrls": [ + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/MODIS_T-JPL-L2P-v2019.0/20000709202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc", + "Description": "Download 20000709202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc", + "Type": "GET DATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000709202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc.md5", + "Description": "Download 20000709202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.nc.md5", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000709202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.cmr.json", + "Description": "Download 20000709202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0.cmr.json", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/s3credentials", + "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", + "Type": "VIEW RELATED INFORMATION" + }, + { + "URL": "https://opendap.uat.earthdata.nasa.gov/providers/POCLOUD/collections/GHRSST%20Level%202P%20Global%20Sea%20Surface%20Skin%20Temperature%20from%20the%20Moderate%20Resolution%20Imaging%20Spectroradiometer%20(MODIS)%20on%20the%20NASA%20Terra%20satellite%20(GDS2)/granules/20000709202006-JPL-L2P_GHRSST-SSTskin-MODIS_T-N-v02.0-fv01.0", + "Type": "GET DATA", + "Subtype": "OPENDAP DATA", + "Description": "OPeNDAP request URL" + } + ] + } + }, + { + "meta": { + "concept-type": "granule", + "concept-id": "G1236933771-POCLOUD", + "revision-id": 1, + "native-id": "20000719202000-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0", + "provider-id": "POCLOUD", + "format": "application/vnd.nasa.cmr.umm+json", + "revision-date": "2020-08-13T17:15:44.500Z" + }, + "umm": { + "TemporalExtent": { + "RangeDateTime": { + "EndingDateTime": "2000-07-19T20:25:00.000Z", + "BeginningDateTime": "2000-07-19T20:20:00.000Z" + } + }, + "MetadataSpecification": { + "URL": "https://cdn.earthdata.nasa.gov/umm/granule/v1.6.4", + "Name": "UMM-G", + "Version": "1.6.4" + }, + "GranuleUR": "20000719202000-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0", + "ProviderDates": [ + { "Type": "Insert", "Date": "2020-08-13T17:15:28.180Z" }, + { "Type": "Update", "Date": "2020-08-13T17:15:28.180Z" } + ], + "SpatialExtent": { + "HorizontalSpatialDomain": { + "Geometry": { + "BoundingRectangles": [ + { + "WestBoundingCoordinate": 172.651, + "SouthBoundingCoordinate": -69.538, + "EastBoundingCoordinate": 180, + "NorthBoundingCoordinate": -46.862 + }, + { + "WestBoundingCoordinate": -180, + "SouthBoundingCoordinate": -69.538, + "EastBoundingCoordinate": -136.062, + "NorthBoundingCoordinate": -46.862 + } + ] + } + } + }, 
+ "DataGranule": { + "ArchiveAndDistributionInformation": [ + { + "SizeUnit": "MB", + "Size": 20.450956344604492, + "Name": "20000719202000-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc" + } + ], + "DayNightFlag": "Unspecified", + "ProductionDateTime": "2020-08-13T17:10:49.000Z" + }, + "CollectionReference": { + "Version": "2019.0", + "ShortName": "MODIS_T-JPL-L2P-v2019.0" + }, + "RelatedUrls": [ + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-protected/MODIS_T-JPL-L2P-v2019.0/20000719202000-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc", + "Description": "The base directory location for the granule.", + "Type": "GET DATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000719202000-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.nc.md5", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/podaac-uat-cumulus-public/MODIS_T-JPL-L2P-v2019.0/20000719202000-JPL-L2P_GHRSST-SSTskin-MODIS_T-D-v02.0-fv01.0.cmr.json", + "Description": "File to download", + "Type": "EXTENDED METADATA" + }, + { + "URL": "https://archive.podaac.uat.earthdata.nasa.gov/s3credentials", + "Description": "api endpoint to retrieve temporary credentials valid for same-region direct s3 access", + "Type": "VIEW RELATED INFORMATION" + } + ] + } + } + ] +} diff --git a/tests/test_args.py b/tests/test_args.py new file mode 100644 index 0000000..0878208 --- /dev/null +++ b/tests/test_args.py @@ -0,0 +1,24 @@ +import os +from podaac.hitide_backfill_tool.file_util import write_yaml_file +from podaac.hitide_backfill_tool.args import parse_args + +def test_parsing_args_with_config(tmp_path): + config_path = os.path.join(tmp_path, "test_config.yml") + test_config = { + "cmr": "ops", + "image": "off", + "use_data_url": True, + "default_message_config" : "/test/path" + } + args = ["--config", config_path, "--collection", "abc", "--image=force", "--preview"] + + write_yaml_file(config_path, test_config) + + config = parse_args(args) + + assert config.cmr == "ops" # specified in config file + assert config.collection == "abc" # specified in cli args + assert config.footprint == "on" # specified as default arg + assert config.image == "force" # specified in config file, overridden in cli args + assert config.preview == True # flag specified in cli args + assert config.use_data_url == True # flag specified in config file, NOT overridden in cli args diff --git a/tests/test_file_util.py b/tests/test_file_util.py new file mode 100644 index 0000000..8ec47c7 --- /dev/null +++ b/tests/test_file_util.py @@ -0,0 +1,7 @@ +from podaac.hitide_backfill_tool.file_util import load_json_file + + +def test_loading_a_json_file(): + a = load_json_file("resources/sample_granules_1.json", + relative_to=__file__) + assert a["hits"] == 998700 diff --git a/tests/test_granule_functions.py b/tests/test_granule_functions.py new file mode 100644 index 0000000..4a1de06 --- /dev/null +++ b/tests/test_granule_functions.py @@ -0,0 +1,95 @@ + +from podaac.hitide_backfill_tool.cmr.cmr_granule import CmrGranule +from podaac.hitide_backfill_tool.file_util import load_json_file + +sample_granules = load_json_file( + 'resources/cmr_search_test_granules.json', relative_to=__file__) + + +def test_granule_has_footprint____has_BoundingRect___default_behavior(): + granule = CmrGranule(sample_granules["granule_with_bounding_rectangle"]) + assert not granule.has_footprint() + + +def 
test_granule_has_footprint____has_BoundingRect___BoundingRectangles_acceptable(): + granule = CmrGranule( + sample_granules["granule_with_bounding_rectangle_and_gpolygon"], footprint_geometries=["BoundingRectangles"]) + assert granule.has_footprint() + + +def test_granule_has_opendap_url____has_OpenDapURL_acceptable(): + granule = CmrGranule( + sample_granules["granule_that_has_opendap_url"]) + assert granule.has_opendap_url() + + +def test_granule_missing_opendap_url____missing_OpenDapURL(): + granule = CmrGranule( + sample_granules["granule_with_bounding_rectangle_and_gpolygon"]) + assert not granule.has_opendap_url() + + +def test_granule_has_footprint____has_footprint(): + granule = CmrGranule( + sample_granules["granule_with_bounding_rectangle_and_gpolygon"]) + assert granule.has_footprint() + + +def test_granule_has_image___has_image(): + granule = CmrGranule( + sample_granules["granule_with_related_urls_and_one_is_image"]) + assert granule.has_image() + + +def test_granule_has_image___has_no_image(): + granule = CmrGranule( + sample_granules["granule_with_related_urls_but_no_image"]) + assert not granule.has_image() + + +def test_functions_return_false_when_data_is_missing_from_granule(): + granule = CmrGranule({}) + assert not granule.has_footprint() + assert not granule.has_image() + + +##### S3 Bucket Info Tests ##### + +def test_gets_s3_bucket_info__from_s3_url__with_one_directory(): + # The sample granule has an s3 url of s3://bucket-name/directory1/filename.nc + granule = CmrGranule(sample_granules["granule_that_has_s3_url_with_one_directory"]) + bucket_info = granule.s3_bucket_info() + + assert bucket_info["bucket"] == "bucket-name" + assert bucket_info["key"] == "directory1/filename.nc" + assert bucket_info["filename"] == "filename.nc" + + +def test_gets_s3_bucket_info__from_s3_url__with_2_directories(): + # The sample granule has an s3 url of s3://bucket-name/directory1/directory2/filename.nc + granule = CmrGranule(sample_granules["granule_that_has_s3_url_with_two_directories"]) + bucket_info = granule.s3_bucket_info() + + assert bucket_info["bucket"] == "bucket-name" + assert bucket_info["key"] == "directory1/directory2/filename.nc" + assert bucket_info["filename"] == "filename.nc" + + +def test_gets_s3_bucket_info__from_data_url__with_one_directory(): + # The sample granule has a data url of https://server-name.com/bucket-name/directory1/filename.nc + granule = CmrGranule(sample_granules["granule_that_has_data_url_with_one_directory"], can_use_data_url_for_s3_bucket_info=True) + bucket_info = granule.s3_bucket_info() + + assert bucket_info["bucket"] == "bucket-name" + assert bucket_info["key"] == "directory1/filename.nc" + assert bucket_info["filename"] == "filename.nc" + + +def test_gets_s3_bucket_info__from_data_url__with_two_directories(): + # The sample granule has a data url of https://server-name.com/bucket-name/directory1/directory2/filename.nc + granule = CmrGranule(sample_granules["granule_that_has_data_url_with_two_directories"], can_use_data_url_for_s3_bucket_info=True) + bucket_info = granule.s3_bucket_info() + + assert bucket_info["bucket"] == "bucket-name" + assert bucket_info["key"] == "directory1/directory2/filename.nc" + assert bucket_info["filename"] == "filename.nc" diff --git a/tests/test_running_the_backfill_tool.py b/tests/test_running_the_backfill_tool.py new file mode 100644 index 0000000..742e877 --- /dev/null +++ b/tests/test_running_the_backfill_tool.py @@ -0,0 +1,127 @@ +import os +import pytest +from podaac.hitide_backfill_tool.cli import main 
+from moto.core import DEFAULT_ACCOUNT_ID
+from moto.sns import sns_backends
+from moto import mock_sns, mock_s3
+import boto3
+import json
+from podaac.hitide_backfill_tool.file_util import make_absolute
+
+#
+# Fixtures
+#
+
+
+@pytest.fixture(scope='function')
+def aws_credentials():
+    """Mocked AWS Credentials for moto."""
+    os.environ['AWS_ACCESS_KEY_ID'] = 'testing'
+    os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing'
+    os.environ['AWS_SECURITY_TOKEN'] = 'testing'
+    os.environ['AWS_SESSION_TOKEN'] = 'testing'
+    os.environ['AWS_DEFAULT_REGION'] = 'us-west-2'
+
+
+@pytest.fixture(scope='function')
+def sns(aws_credentials):
+    with mock_sns():
+        yield boto3.client('sns')
+
+
+@pytest.fixture(scope='function')
+def sns_topic(sns):
+    response = sns.create_topic(Name="test")
+    topic_arn = response['TopicArn']
+    yield sns_backends[DEFAULT_ACCOUNT_ID]["us-west-2"].topics[topic_arn]
+
+
+@pytest.fixture(scope='function')
+def s3(aws_credentials):
+    with mock_s3():
+        yield boto3.client('s3')
+
+@pytest.fixture(scope='function')
+def s3_object(s3):
+
+    # Create mock bucket
+    bucket_name = 'podaac-uat-cumulus-protected'
+    s3.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": "us-west-2"})
+
+    # Upload a file to the bucket
+    object_key = 'MODIS_A-JPL-L2P-v2019.0/20020704004505-JPL-L2P_GHRSST-SSTskin-MODIS_A-D-v02.0-fv01.0.nc.dmrpp'
+
+    abs_path = make_absolute('resources/sample.nc.dmrpp', relative_to=__file__)
+    with open(abs_path, 'rb') as f:
+        s3.put_object(Bucket=bucket_name, Key=object_key, Body=f)
+
+    yield s3
+
+#
+# Tests
+#
+@mock_s3
+@pytest.mark.e2e
+def test_running_the_backfill_tool_will_send_a_message_to_an_sns_topic(sns_topic):
+    cumulus_configurations_dir = make_absolute('resources/cumulus_configurations', relative_to=__file__)
+
+    main(f"""
+        -c MODIS_A-JPL-L2P-v2019.0
+        --provider pocloud
+        --cmr uat
+        -sd 2002-07-04T00:00:00.000Z -ed 2002-07-04T01:00:00.000Z
+        --use-data-url
+        --message-limit 2
+        --cumulus uat
+        --sns-arn {sns_topic.arn}
+        --log-level DEBUG
+        --footprint force
+        --image force
+        --cumulus-configurations {cumulus_configurations_dir}
+        --default_message_config tests/resources/default_message_config.json
+    """)
+
+    # There should be two messages for one granule: one for forge and one for tig.
+    notifications = sns_topic.sent_notifications
+    assert len(notifications) == 2
+
+    first_message = json.loads(notifications[0][1])
+    assert bool(first_message.get("forge")) ^ bool(first_message.get("tig"))
+
+    second_message = json.loads(notifications[1][1])
+    assert bool(second_message.get("forge")) ^ bool(second_message.get("tig"))
+
+
+@pytest.mark.skip(reason="Temporarily disabled")
+@pytest.mark.e2e
+def test_running_the_backfill_tool_for_dmrpp(s3_object, sns_topic):
+    cumulus_configurations_dir = make_absolute('resources/cumulus_configurations', relative_to=__file__)
+
+    main(f"""
+        -c MODIS_A-JPL-L2P-v2019.0
+        --provider pocloud
+        --cmr uat
+        --page-size 100
+        -sd 2002-07-04T00:00:00.000Z -ed 2002-07-04T01:00:00.000Z
+        --use-data-url
+        --cumulus uat
+        --sns-arn {sns_topic.arn}
+        --log-level DEBUG
+        --footprint off
+        --image off
+        --dmrpp on
+        --dmrpp-min-version 3.20.9-91
+        --cumulus-configurations {cumulus_configurations_dir}
+        --default_message_config "/test"
+    """)
+
+    notifications = sns_topic.sent_notifications
+    assert len(notifications) == 13
+
+    first_message = json.loads(notifications[0][1])
+    second_message = json.loads(notifications[1][1])
+
+    assert second_message.get("dmrpp")
+    assert second_message.get("skip_cmr_opendap_update")
+    assert first_message.get("dmrpp")
+    assert not first_message.get("skip_cmr_opendap_update")
diff --git a/tests/test_searching_cmr.py b/tests/test_searching_cmr.py
new file mode 100644
index 0000000..8cce159
--- /dev/null
+++ b/tests/test_searching_cmr.py
@@ -0,0 +1,41 @@
+from podaac.hitide_backfill_tool.cmr.search import GranuleSearch
+
+
+def test_granule_search_with_multiple_pages():
+    search = GranuleSearch(
+        base_url="https://cmr.uat.earthdata.nasa.gov",
+        collection_short_name="MODIS_A-JPL-L2P-v2019.0",
+        provider="pocloud",
+        page_size=3,
+        page_limit=2
+    )
+
+    count = 0
+    while not search.is_done():
+        search.get_next_page()
+        for granule in search.granules():
+            count += 1
+
+    assert count == 6
+
+
+def test_granule_search_generator():
+    search = GranuleSearch(
+        base_url="https://cmr.uat.earthdata.nasa.gov",
+        collection_short_name="MODIS_A-JPL-L2P-v2019.0",
+        provider="pocloud",
+        page_size=3,
+        page_limit=2
+    )
+
+    granule_count = 0
+    for granule in search.granule_generator():
+        granule_count += 1
+        assert isinstance(granule.native_id(), str)
+
+    assert granule_count == 6
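+
+# NOTE: both tests above query the live CMR UAT endpoint, so they require
+# network access and assume the MODIS_A-JPL-L2P-v2019.0 collection still
+# returns at least six granules (page_size=3 * page_limit=2) for this provider.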