diff --git a/.github/workflows/test_plugin_uploader.yml b/.github/workflows/test_plugin_uploader.yml
new file mode 100644
index 000000000..5c53f44a3
--- /dev/null
+++ b/.github/workflows/test_plugin_uploader.yml
@@ -0,0 +1,56 @@
+name: Test Plugin Uploader
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+
+jobs:
+  unit-test:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      - name: Install deps
+        working-directory: resources/plugin_uploader
+        run: pip install -r requirements_test.txt
+
+      - name: Run unit tests
+        working-directory: resources/plugin_uploader
+        run: pytest .
+
+  ruff-lint:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      - uses: chartboost/ruff-action@v1
+        with:
+          version: 0.4.10
+          src: './resources/plugin_uploader'
+
+  pyright-type-check:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+
+      - name: Install deps
+        working-directory: resources/plugin_uploader
+        run: pip install -r requirements_test.txt pyright
+
+      - name: Run pyright
+        working-directory: resources/plugin_uploader
+        run: pyright
diff --git a/.gitignore b/.gitignore
index f21515d33..78ef7bbb5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,4 @@ release_notes.md
 .idea
 .vscode
 .op
+__pycache__
diff --git a/resources/plugin_uploader/README.md b/resources/plugin_uploader/README.md
new file mode 100644
index 000000000..cb8047b58
--- /dev/null
+++ b/resources/plugin_uploader/README.md
@@ -0,0 +1,90 @@
+# Plugin uploader
+
+## Description
+
+`plugin_uploader.py` uploads the binaries generated by goreleaser to S3 in a manner that is consumable by RPK as a plugin.
+
+```
+Usage: plugin_uploader.py [OPTIONS] COMMAND [ARGS]...
+
+  CLI tool to upload/index goreleaser-built binaries to/in S3.
+
+Options:
+  --help  Show this message and exit.
+
+Commands:
+  upload-archives  Create tar.gz archives from binaries and upload to S3
+  upload-manifest  Create manifest.json and upload to S3
+```
+
+## Install
+
+`pip install -r requirements.txt`
+
+## How to use
+
+The primary use case is in GitHub Actions, in response to the creation of a GitHub release.
+The details below also apply to running it in other contexts (e.g. local testing, manual release interventions).
+
+It's expected that you have used goreleaser to build a set of binaries for a given release tag (such as following a
+GitHub release tag creation).
+
+Goreleaser creates a `$DIST` directory at the project root dir containing all built binaries and two JSON files:
+
+* `$DIST/<build>_<goos>_<goarch>/` (one directory per built binary, e.g. `dist/cow_linux_amd64_v1/`)
+* ...
+* `$DIST/artifacts.json`
+* `$DIST/metadata.json`
+
+### Create archives from binaries and upload them
+
+Locate the `artifacts.json` and `metadata.json` files produced by goreleaser,
+e.g. `$DIST/artifacts.json` and `$DIST/metadata.json`.
+
+`$DIST` is `dist` by default, but may be overridden to something else in goreleaser configs.
+
+```shell
+./plugin_uploader.py upload-archives \
+    --artifacts-file=$DIST/artifacts.json \
+    --metadata-file=$DIST/metadata.json \
+    --project-root-dir=<PROJECT_ROOT> \
+    --region=<REGION> \
+    --bucket=<BUCKET> \
+    --plugin=<PLUGIN_NAME> \
+    --goos=<GOOS_CSV> \
+    --goarch=<GOARCH_CSV>
+```
+
+`PROJECT_ROOT` should be the root directory of the Golang project (by default, where `.goreleaser.yml` lives).
+
+`PLUGIN_NAME` should match the build name as defined in goreleaser configs.
+It's assumed that the binary filename is always `redpanda-<PLUGIN_NAME>`. E.g. for `connect`:
+
+* The build name is `connect`
+* The binary filename is `redpanda-connect`
+
+A binary is included for archival/upload only if it matches one of the given `--goos` values AND one of the given `--goarch` values.
+
+`--dry-run` is available for skipping the final S3 upload step.
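+
+For example, a hypothetical local dry run for `connect` (bucket, region, and platform lists are illustrative):
+
+```shell
+./plugin_uploader.py upload-archives \
+    --artifacts-file=dist/artifacts.json \
+    --metadata-file=dist/metadata.json \
+    --project-root-dir=. \
+    --region=us-east-1 \
+    --bucket=my-plugin-bucket \
+    --plugin=connect \
+    --goos=linux,darwin \
+    --goarch=amd64,arm64 \
+    --dry-run
+```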
+
+### Create manifest.json and upload it
+
+This lists all archives for the specific plugin and constructs a `manifest.json` from the listing.
+
+This should be run after uploading any archives.
+
+```shell
+./plugin_uploader.py upload-manifest \
+    --region=<REGION> \
+    --bucket=<BUCKET> \
+    --plugin=<PLUGIN_NAME> \
+    --repo-hostname=<REPO_HOSTNAME>
+```
+
+`--repo-hostname` is used for generating the right public-facing download URLs for archives in the plugin repo, e.g.
+`rpk-plugins.redpanda.com`.
+
+`--dry-run` is available for skipping the final S3 upload step.
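+
+The uploaded `manifest.json` has roughly this shape (illustrative values; the `sha256` is the digest of the binary
+inside the archive, and `is_latest` is set only on the highest non-RC version):
+
+```json
+{
+    "created_at": 1700000000,
+    "archives": [
+        {
+            "version": "4.35.0",
+            "is_latest": true,
+            "artifacts": {
+                "linux-amd64": {
+                    "path": "https://rpk-plugins.redpanda.com/connect/archives/4.35.0/redpanda-connect-linux-amd64.tar.gz",
+                    "sha256": "<sha256 of the binary>"
+                }
+            }
+        }
+    ]
+}
+```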
diff --git a/resources/plugin_uploader/plugin_uploader.py b/resources/plugin_uploader/plugin_uploader.py
new file mode 100755
index 000000000..6815d0ffc
--- /dev/null
+++ b/resources/plugin_uploader/plugin_uploader.py
@@ -0,0 +1,431 @@
+#!/usr/bin/env python3
+
+import collections
+import dataclasses
+import hashlib
+import json
+import logging
+import os
+import re
+import tarfile
+import tempfile
+import time
+import urllib.parse
+from contextlib import contextmanager
+
+import boto3
+import click
+from pydantic import BaseModel
+
+
+# Partial schema of goreleaser metadata.json
+class Metadata(BaseModel):
+    tag: str
+    version: str
+
+
+# Partial schema of goreleaser artifacts.json
+class Artifact(BaseModel):
+    name: str
+    path: str
+    type: str
+    goos: str | None = None
+    goarch: str | None = None
+
+
+@dataclasses.dataclass
+class PluginConfig:
+    """Encapsulates config specific to a plugin (like `connect`)"""
+
+    plugin_name: str
+    binary_name: str
+
+    # All these path methods return S3 paths,
+    # e.g. connect/archives/4.35.0/redpanda-connect-linux-amd64.tar.gz
+    def get_manifest_path(self) -> str:
+        return f"{self.plugin_name}/manifest.json"
+
+    def get_archives_root_path(self) -> str:
+        return f"{self.plugin_name}/archives"
+
+    def get_archives_version_dir_path(self, version: str) -> str:
+        return f"{self.get_archives_root_path()}/{version}"
+
+    def get_archive_full_path(self, binary_artifact: Artifact, version: str) -> str:
+        return f"{self.get_archives_version_dir_path(version)}/{binary_artifact.name}-{binary_artifact.goos}-{binary_artifact.goarch}.tar.gz"
+
+
+def get_plugin_config(plugin_name: str) -> PluginConfig:
+    return PluginConfig(plugin_name=plugin_name, binary_name=f"redpanda-{plugin_name}")
+
+
+def get_binary_sha256_digest(filepath: str) -> str:
+    with open(filepath, "rb") as f:
+        s = hashlib.sha256(f.read())
+    return s.hexdigest()
+
+
+def get_artifacts(artifacts_file: str) -> list[Artifact]:
+    with open(artifacts_file, "r") as f:
+        data = json.load(f)
+    assert (
+        type(data) is list
+    ), f"Expected {artifacts_file} to contain a JSON list payload"
+    return [Artifact(**item) for item in data]
+
+
+def get_metadata(metadata_file: str) -> Metadata:
+    with open(metadata_file, "r") as f:
+        data = json.load(f)
+    assert (
+        type(data) is dict
+    ), f"Expected {metadata_file} to contain a JSON dict payload"
+    return Metadata(**data)
+
+
+class S3BucketClient:
+    """A wrapper around a boto3 S3 client that knows the bucket it works with.
+
+    Comes with higher-level methods as needed."""
+
+    def __init__(self, bucket: str, region: str):
+        self._client = boto3.client("s3", region_name=region)
+        self._bucket = bucket
+
+    def upload_file_with_tags(
+        self, file: str, object_path: str, tags: dict[str, str] | None = None
+    ):
+        with open(file, "rb") as f:
+            return self.upload_blob_with_tags(f.read(), object_path, tags=tags)
+
+    def upload_blob_with_tags(
+        self, data: bytes, object_path: str, tags: dict[str, str] | None = None
+    ):
+        self._client.put_object(
+            Bucket=self._bucket,
+            Body=data,
+            Key=object_path,
+            # We want users to receive the latest stuff promptly.
+            # This minimizes inconsistencies between manifest.json and archives
+            # when served over CloudFront.
+            CacheControl="max-age=1",
+            Tagging=urllib.parse.urlencode(tags or {}),
+        )
+
+    def list_dir_recursive(self, s3_dir_path: str | None = None) -> list[str]:
+        paginator = self._client.get_paginator("list_objects_v2")
+        if s3_dir_path is None:
+            pages = paginator.paginate(Bucket=self._bucket)
+        else:
+            pages = paginator.paginate(Bucket=self._bucket, Prefix=s3_dir_path)
+
+        keys = []
+        for page in pages:
+            # A page without "Contents" indicates empty results, break out immediately
+            if "Contents" not in page:
+                break
+            for obj in page["Contents"]:
+                keys.append(obj["Key"])
+        return keys
+
+    def get_object_tags(self, object_path: str) -> dict[str, str]:
+        response = self._client.get_object_tagging(
+            Bucket=self._bucket,
+            Key=object_path,
+        )
+        return {tag["Key"]: tag["Value"] for tag in response["TagSet"]}
+
+
+def create_tar_gz_archive(single_filepath: str) -> str:
+    # tempfile.mktemp() is deprecated and racy; mkstemp() creates the file safely.
+    fd, tmp_archive = tempfile.mkstemp(suffix=".tar.gz")
+    os.close(fd)
+    with tarfile.open(tmp_archive, "w:gz") as tar:
+        tar.add(single_filepath, arcname=os.path.basename(single_filepath))
+    return tmp_archive
+
+
+TAG_BINARY_NAME = "redpanda/binary_name"
+TAG_BINARY_SHA256 = "redpanda/binary_sha256"
+TAG_GOOS = "redpanda/goos"
+TAG_GOARCH = "redpanda/goarch"
+TAG_VERSION = "redpanda/version"
+
+
+@contextmanager
+def cwd(new_dir: str):
+    """Temporarily change the working directory."""
+    old_dir = os.getcwd()
+    try:
+        os.chdir(new_dir)
+        yield
+    finally:
+        os.chdir(old_dir)
+
+
+def create_and_upload_archives(
+    project_root_dir: str,
+    plugin_config: PluginConfig,
+    artifacts: list[Artifact],
+    bucket: str,
+    region: str,
+    version: str,
+    dry_run: bool,
+):
+    if dry_run:
+        s3_bucket_client = None
+    else:
+        s3_bucket_client = S3BucketClient(bucket, region)
+    with cwd(project_root_dir):
+        for artifact in artifacts:
+            logging.info(f"Processing {artifact}")
+            binary_sha256 = get_binary_sha256_digest(artifact.path)
+            logging.info(f"Binary SHA256 = {binary_sha256}")
+            tmp_archive = None
+            try:
+                tmp_archive = create_tar_gz_archive(artifact.path)
+                logging.info(f"Created archive {tmp_archive}")
+                s3_path_for_archive = plugin_config.get_archive_full_path(
+                    binary_artifact=artifact, version=version
+                )
+
+                tags = {
+                    TAG_BINARY_NAME: plugin_config.binary_name,
+                    TAG_BINARY_SHA256: binary_sha256,
+                    TAG_GOOS: artifact.goos,
+                    TAG_GOARCH: artifact.goarch,
+                    TAG_VERSION: version,
+                }
+                if dry_run:
+                    logging.info(
+                        f"DRY-RUN - Would have uploaded archive to S3 bucket {bucket} as {s3_path_for_archive}"
+                    )
+                    logging.info(f"Tags: {json.dumps(tags, indent=4)}")
+                else:
+                    logging.info(
+                        f"Uploading archive to S3 bucket {bucket} as {s3_path_for_archive}"
+                    )
+                    assert (
+                        s3_bucket_client is not None
+                    ), "s3_bucket_client should be initialized in non-dry-run mode"
+                    s3_bucket_client.upload_file_with_tags(
+                        file=tmp_archive,
+                        object_path=s3_path_for_archive,
+                        tags=tags,
+                    )
+            finally:
+                if tmp_archive and os.path.exists(tmp_archive):
+                    os.unlink(tmp_archive)
+    logging.info("DONE")
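+
+
+# For example (hypothetical input): given ["4.34.0", "4.35.0", "4.36.0-rc1"],
+# get_max_version_str returns "4.35.0" -- the RC is rejected by the regex below.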
+def get_max_version_str(version_strs: list[str]) -> str | None:
+    max_version = None
+    max_version_tuple = None
+    for version in version_strs:
+        # Only real releases are eligible to be latest. E.g. no RCs.
+        m = re.search(r"^(\d+)\.(\d+)\.(\d+)$", version)
+        if not m:
+            continue
+        version_tuple = (int(m[1]), int(m[2]), int(m[3]))
+        if max_version_tuple is None or version_tuple > max_version_tuple:
+            max_version_tuple = version_tuple
+            max_version = version
+    return max_version
+
+
+def get_object_tags_for_keys(
+    s3_bucket_client: S3BucketClient, keys: list[str]
+) -> dict[str, dict[str, str]]:
+    return {k: s3_bucket_client.get_object_tags(k) for k in keys}
+
+
+def create_and_upload_manifest_json(
+    plugin_config: PluginConfig,
+    bucket: str,
+    region: str,
+    repo_hostname: str,
+    dry_run: bool,
+):
+    # Even in dry-run mode we READ from the S3 bucket; we just won't write anything.
+    # Therefore, S3 creds are needed even for --dry-run.
+    s3_bucket_client = S3BucketClient(bucket, region)
+    list_path = plugin_config.get_archives_root_path().rstrip("/") + "/"
+    logging.info(f"Listing all objects in bucket {bucket} under path {list_path}")
+    keys = s3_bucket_client.list_dir_recursive(list_path)
+
+    object_tags_for_keys = get_object_tags_for_keys(s3_bucket_client, keys)
+
+    archives = []
+    manifest = {
+        "created_at": int(time.time()),
+        "archives": archives,
+    }
+    version_to_artifact_infos: dict[str, list[dict[str, str]]] = (
+        collections.defaultdict(list)
+    )
+    for key, tag_map in object_tags_for_keys.items():
+        try:
+            binary_name = tag_map[TAG_BINARY_NAME]
+            if binary_name != plugin_config.binary_name:
+                logging.info(f"Skipping {key}, wrong binary name: {binary_name}")
+                continue
+            logging.info(f"Found {key} with tags: {tag_map}")
+            version_to_artifact_infos[tag_map[TAG_VERSION]].append(
+                {
+                    "binary_name": tag_map[TAG_BINARY_NAME],
+                    "binary_sha256": tag_map[TAG_BINARY_SHA256],
+                    "goos": tag_map[TAG_GOOS],
+                    "goarch": tag_map[TAG_GOARCH],
+                    "path": key,
+                }
+            )
+        except KeyError as ke:
+            logging.info(f"Skipping {key}, missing tag: {ke}")
+            continue
+
+    assert (
+        version_to_artifact_infos
+    ), f"No artifacts found in bucket {bucket} for {plugin_config.plugin_name}"
+    max_version = get_max_version_str(list(version_to_artifact_infos))
+
+    if max_version is None:
+        raise ValueError(
+            f"Could not deduce max version from existing artifacts in {bucket}"
+        )
+
+    for version, artifact_infos in version_to_artifact_infos.items():
+        artifacts: dict[str, dict[str, str]] = {}
+        for artifact_info in artifact_infos:
+            artifacts[f"{artifact_info['goos']}-{artifact_info['goarch']}"] = {
+                "path": f"https://{repo_hostname}/{artifact_info['path']}",
+                "sha256": artifact_info["binary_sha256"],
+            }
+        archive = {
+            "version": version,
+            "artifacts": artifacts,
+        }
+        if version == max_version:
+            archive["is_latest"] = True
+        archives.append(archive)
+    logging.info("Manifest:")
+    manifest_json = json.dumps(manifest, indent=4, sort_keys=True)
+    logging.info(manifest_json)
+    if dry_run:
+        logging.info(
+            f"DRY-RUN - Would have uploaded manifest.json to {plugin_config.get_manifest_path()}"
+        )
+    else:
+        logging.info(f"Uploading manifest.json to {plugin_config.get_manifest_path()}")
+        s3_bucket_client.upload_blob_with_tags(
+            object_path=plugin_config.get_manifest_path(),
+            data=manifest_json.encode("utf-8"),
+        )
+
+
+@click.group(help="CLI tool to upload/index goreleaser-built binaries to/in S3.")
+def cli():
+    logging.basicConfig(
+        level=logging.INFO, format="%(asctime)s %(levelname)s %(name)s %(message)s"
+    )
+
+
+@cli.command(
+    name="upload-archives",
+    help="Create tar.gz archives from binaries and upload to S3",
+)
+@click.option(
+    "--artifacts-file",
+    required=True,
+    help="artifacts.json file produced by `goreleaser`",
+)
+@click.option(
+    "--metadata-file", required=True, help="metadata.json file produced by `goreleaser`"
+)
+@click.option(
+    "--project-root-dir",
+    required=True,
+    help="Root directory of the Go project. File paths within artifacts.json are relative to this directory.",
+)
+@click.option("--region", required=True)
+@click.option("--bucket", required=True)
+@click.option("--plugin", required=True, help="Plugin to process. E.g. `connect`")
+@click.option(
+    "--goos",
+    required=True,
+    help="CSV list of OSes to process binaries for. E.g. 'linux,darwin'",
+)
+@click.option(
+    "--goarch",
+    required=True,
+    help="CSV list of architectures to process binaries for. E.g. 'amd64,arm64'",
+)
+@click.option(
+    "--deduce-version-from-tag",
+    is_flag=True,
+    help="Deduce version from tag in metadata.json",
+)
+@click.option("--dry-run", is_flag=True, help="Skip the final S3 upload step")
+def upload_archives(
+    artifacts_file: str,
+    metadata_file: str,
+    project_root_dir: str,
+    region: str,
+    bucket: str,
+    plugin: str,
+    goos: str,
+    goarch: str,
+    deduce_version_from_tag: bool,
+    dry_run: bool,
+):
+    goos_list = goos.split(",")
+    goarch_list = goarch.split(",")
+    plugin_config = get_plugin_config(plugin)
+    artifacts = get_artifacts(artifacts_file)
+    if deduce_version_from_tag:
+        # Tags look like "v4.34.0"; drop the "v" prefix.
+        version = get_metadata(metadata_file).tag.removeprefix("v")
+    else:
+        version = get_metadata(metadata_file).version
+    artifacts_to_process = [
+        a
+        for a in artifacts
+        if a.type == "Binary"
+        and a.name == plugin_config.binary_name
+        and a.goos in goos_list
+        and a.goarch in goarch_list
+    ]
+    logging.info(f"Found {len(artifacts_to_process)} artifacts to process")
+    for a in artifacts_to_process:
+        logging.info(f"    {a}")
+    create_and_upload_archives(
+        project_root_dir=project_root_dir,
+        plugin_config=plugin_config,
+        artifacts=artifacts_to_process,
+        version=version,
+        region=region,
+        bucket=bucket,
+        dry_run=dry_run,
+    )
+
+
+@cli.command(name="upload-manifest", help="Create manifest.json and upload to S3")
+@click.option("--bucket", required=True)
+@click.option("--region", required=True)
+@click.option("--repo-hostname", required=True)
+@click.option("--plugin", required=True, help="Plugin to process. E.g. `connect`")
+@click.option("--dry-run", is_flag=True, help="Skip the final S3 upload step")
+def upload_manifest(
+    bucket: str, region: str, repo_hostname: str, plugin: str, dry_run: bool
+):
+    plugin_config = get_plugin_config(plugin)
+    create_and_upload_manifest_json(
+        plugin_config=plugin_config,
+        bucket=bucket,
+        region=region,
+        repo_hostname=repo_hostname,
+        dry_run=dry_run,
+    )
+
+
+if __name__ == "__main__":
+    cli()
diff --git a/resources/plugin_uploader/requirements.txt b/resources/plugin_uploader/requirements.txt
new file mode 100644
index 000000000..c3a1ee0f9
--- /dev/null
+++ b/resources/plugin_uploader/requirements.txt
@@ -0,0 +1,3 @@
+pydantic>=2.8
+boto3>=1.26
+click==8.1.7
\ No newline at end of file
diff --git a/resources/plugin_uploader/requirements_test.txt b/resources/plugin_uploader/requirements_test.txt
new file mode 100644
index 000000000..9c844cfda
--- /dev/null
+++ b/resources/plugin_uploader/requirements_test.txt
@@ -0,0 +1,5 @@
+pydantic>=2.8
+boto3>=1.26
+click==8.1.7
+moto[s3]==5.0.13
+pytest==8.3.2
\ No newline at end of file
diff --git a/resources/plugin_uploader/test_data/dist/artifacts.json b/resources/plugin_uploader/test_data/dist/artifacts.json
new file mode 100644
index 000000000..12946f37c
--- /dev/null
+++ b/resources/plugin_uploader/test_data/dist/artifacts.json
@@ -0,0 +1,35 @@
+[
+    {
+        "name": "metadata.json",
+        "path": "dist/metadata.json",
+        "internal_type": 30,
+        "type": "Metadata"
+    },
+    {
+        "name": "redpanda-cow",
+        "path": "dist/cow_linux_amd64_v1/redpanda-cow",
+        "goos": "linux",
+        "goarch": "amd64",
+        "goamd64": "v1",
+        "internal_type": 4,
+        "type": "Binary",
+        "extra": {
+            "Binary": "redpanda-cow",
+            "Ext": "",
+            "ID": "cow"
+        }
+    },
+    {
+        "name": "redpanda-cow",
+        "path": "dist/cow_darwin_arm64/redpanda-cow",
+        "goos": "darwin",
+        "goarch": "arm64",
+        "internal_type": 4,
+        "type": "Binary",
+        "extra": {
+            "Binary": "redpanda-cow",
+            "Ext": "",
+            "ID": "cow"
+        }
+    }
+]
\ No newline at end of file
diff --git a/resources/plugin_uploader/test_data/dist/cow_darwin_arm64/redpanda-cow b/resources/plugin_uploader/test_data/dist/cow_darwin_arm64/redpanda-cow
new file mode 100644
index 000000000..e69de29bb
diff --git a/resources/plugin_uploader/test_data/dist/cow_linux_amd64_v1/redpanda-cow b/resources/plugin_uploader/test_data/dist/cow_linux_amd64_v1/redpanda-cow
new file mode 100644
index 000000000..e69de29bb
diff --git a/resources/plugin_uploader/test_data/dist/metadata_v4_34_0.json b/resources/plugin_uploader/test_data/dist/metadata_v4_34_0.json
new file mode 100644
index 000000000..664b807c4
--- /dev/null
+++ b/resources/plugin_uploader/test_data/dist/metadata_v4_34_0.json
@@ -0,0 +1,12 @@
+{
+    "project_name": "cow",
+    "tag": "v4.34.0",
+    "previous_tag": "v4.33.0-rc2",
+    "version": "4.34.0",
+    "commit": "7eb28f2a994e277f17bf0530097d99208e65cddb",
+    "date": "2024-08-29T23:53:58.388135715Z",
+    "runtime": {
+        "goos": "linux",
+        "goarch": "arm64"
+    }
+}
\ No newline at end of file
diff --git a/resources/plugin_uploader/test_data/dist/metadata_v4_35_0.json b/resources/plugin_uploader/test_data/dist/metadata_v4_35_0.json
new file mode 100644
index 000000000..1218b32cb
--- /dev/null
+++ b/resources/plugin_uploader/test_data/dist/metadata_v4_35_0.json
@@ -0,0 +1,12 @@
+{
+    "project_name": "cow",
+    "tag": "v4.35.0",
+    "previous_tag": "v4.34.0-rc2",
+    "version": "4.35.0",
+    "commit": "7eb28f2a994e277f17bf0530097d99208e65cddb",
+    "date": "2024-08-29T23:53:58.388135715Z",
+    "runtime": {
+        "goos": "linux",
+        "goarch": "arm64"
+    }
+}
\ No newline at end of file
diff --git a/resources/plugin_uploader/test_plugin_uploader.py b/resources/plugin_uploader/test_plugin_uploader.py
new file mode 100644
index 000000000..43d7708af
--- /dev/null
+++ b/resources/plugin_uploader/test_plugin_uploader.py
@@ -0,0 +1,156 @@
+import json
+import os
+import unittest
+
+import boto3
+from click.testing import CliRunner
+from moto import mock_aws
+
+from plugin_uploader import S3BucketClient, PluginConfig, cli
+
+TEST_BUCKET = "my-bucket"
+TEST_REGION = "my-region"
+TEST_PLUGIN = PluginConfig(plugin_name="cow", binary_name="redpanda-cow")
+
+
+def create_bucket_and_return_clients():
+    """Create TEST_BUCKET and return an S3BucketClient plus a raw boto3 S3 client for it."""
+    client = boto3.client("s3", region_name=TEST_REGION)
+    client.create_bucket(
+        Bucket=TEST_BUCKET,
+        CreateBucketConfiguration={"LocationConstraint": TEST_REGION},
+    )
+
+    # S3BucketClient, boto3 S3 client
+    return S3BucketClient(TEST_BUCKET, TEST_REGION), client
+
+
+class TestS3BucketClient(unittest.TestCase):
+    @mock_aws
+    def test_list_dir_recursive(self):
+        bucket_client, _ = create_bucket_and_return_clients()
+        keys_added = set()
+        # >1000 keys forces list_objects_v2 to paginate (page size is 1000).
+        for i in range(2048):
+            key = f"root/{i}/{i}"
+            keys_added.add(key)
+            bucket_client.upload_blob_with_tags(object_path=key, data=b"")
+        found_keys = bucket_client.list_dir_recursive("root")
+        assert set(found_keys) == keys_added
+
+
+RESIDENT_DIR_PATH = os.path.dirname(os.path.realpath(__file__))
+# "test_data" here would map to the root of the real Go project (like the root of the connect repo)
+TEST_DATA_DIR_PATH = f"{RESIDENT_DIR_PATH}/test_data"
+
+
+class TestUploadArchives(unittest.TestCase):
+
+    @mock_aws
+    def test_end_to_end_upload(self):
+        """Run upload-archives, then upload-manifest + verify all archives and correct manifest uploaded"""
+        # Make the bucket clients early; this ensures the bucket exists before we run any command.
+        bucket_client, s3_client = create_bucket_and_return_clients()
+
+        runner = CliRunner()
+
+        ARTIFACTS_FILE = f"{TEST_DATA_DIR_PATH}/dist/artifacts.json"
+
+        def _run_and_validate_upload_archives(
+            metadata_file: str, expected_keys: set[str]
+        ):
+            # chdir to the fake project root so relative paths in artifacts.json resolve
+            os.chdir(TEST_DATA_DIR_PATH)
+            _result = runner.invoke(
+                cli,
+                [
+                    "upload-archives",
+                    f"--artifacts-file={ARTIFACTS_FILE}",
+                    f"--metadata-file={metadata_file}",
+                    f"--project-root-dir={TEST_DATA_DIR_PATH}",
+                    f"--region={TEST_REGION}",
+                    f"--bucket={TEST_BUCKET}",
+                    f"--plugin={TEST_PLUGIN.plugin_name}",
+                    "--goos=linux,darwin,windows",
+                    "--goarch=amd64,arm64,turing",
+                ],
+                # TODO: check whether regular CLI execution is also transparent re: exceptions (we want that)
+                catch_exceptions=False,
+            )
+            assert _result.exit_code == 0
+            found_keys = set(bucket_client.list_dir_recursive())
+
+            assert found_keys == expected_keys
+
+        # upload-archives (first run, for version v4.34.0)
+        _run_and_validate_upload_archives(
+            metadata_file=f"{TEST_DATA_DIR_PATH}/dist/metadata_v4_34_0.json",
+            expected_keys={
+                "cow/archives/4.34.0/redpanda-cow-darwin-arm64.tar.gz",
+                "cow/archives/4.34.0/redpanda-cow-linux-amd64.tar.gz",
+            },
+        )
+
+        # upload-archives (second run, for version v4.35.0)
+        _run_and_validate_upload_archives(
+            metadata_file=f"{TEST_DATA_DIR_PATH}/dist/metadata_v4_35_0.json",
+            expected_keys={
+                "cow/archives/4.34.0/redpanda-cow-darwin-arm64.tar.gz",
+                "cow/archives/4.34.0/redpanda-cow-linux-amd64.tar.gz",
+                "cow/archives/4.35.0/redpanda-cow-darwin-arm64.tar.gz",
+                "cow/archives/4.35.0/redpanda-cow-linux-amd64.tar.gz",
+            },
+        )
"upload-manifest", + f"--region={TEST_REGION}", + f"--bucket={TEST_BUCKET}", + f"--plugin={TEST_PLUGIN.plugin_name}", + "--repo-hostname=cow.farm.com", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0 + response = s3_client.get_object(Bucket=TEST_BUCKET, Key="cow/manifest.json") + found_manifest = json.load(response["Body"]) + expected_manifest = { + "archives": [ + { + "artifacts": { + "darwin-arm64": { + "path": "https://cow.farm.com/cow/archives/4.34.0/redpanda-cow-darwin-arm64.tar.gz", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + }, + "linux-amd64": { + "path": "https://cow.farm.com/cow/archives/4.34.0/redpanda-cow-linux-amd64.tar.gz", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + }, + }, + "version": "4.34.0", + }, + { + "artifacts": { + "darwin-arm64": { + "path": "https://cow.farm.com/cow/archives/4.35.0/redpanda-cow-darwin-arm64.tar.gz", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + }, + "linux-amd64": { + "path": "https://cow.farm.com/cow/archives/4.35.0/redpanda-cow-linux-amd64.tar.gz", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + }, + }, + "is_latest": True, + "version": "4.35.0", + }, + ], + "created_at": 1700000000, + } + + # align created_at - that is always different + found_manifest["created_at"] = 1700000000 + assert expected_manifest == found_manifest + + +if __name__ == "__main__": + unittest.main()