From e7c833fab3c221afd690d82cacb231a5b98877a1 Mon Sep 17 00:00:00 2001
From: Andy Wu
Date: Thu, 22 Aug 2024 11:51:12 -0700
Subject: [PATCH] [feature] add ci workflow to push to the new s3 bucket

---
 .github/workflows/ci-s3.yml | 175 ++++++++++++++++++++++++++++++++++++
 1 file changed, 175 insertions(+)
 create mode 100644 .github/workflows/ci-s3.yml

diff --git a/.github/workflows/ci-s3.yml b/.github/workflows/ci-s3.yml
new file mode 100644
index 000000000..9039453f4
--- /dev/null
+++ b/.github/workflows/ci-s3.yml
@@ -0,0 +1,175 @@
+name: Build and Upload geth Binary to s3
+
+on:
+  push:
+    branches:
+      - main
+  # pull_request:
+  #   branches:
+  #     - main
+
+permissions:
+  id-token: write
+  contents: write
+  pull-requests: write
+  actions: write
+
+env:
+  # How many timestamped internal builds to retain under geth/
+  NUM_INTERNAL_BINARIES_TO_KEEP: 50
+  # How many public release archives to retain under geth-public/
+  NUM_PUBLIC_BINARIES_TO_KEEP: 400
+  S3_BUCKET: story-geth-binaries
+
+jobs:
+  # Add timestamp
+  Timestamp:
+    uses: storyprotocol/gha-workflows/.github/workflows/reusable-timestamp.yml@main
+
+  # Build and upload the geth binary for every target platform
+  build_and_push:
+    needs: Timestamp
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        platform:
+          [linux-amd64, linux-arm64, darwin-amd64, darwin-arm64, windows-amd64]
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4.1.5
+
+      # Pin the Go toolchain from go.mod instead of relying on the runner default.
+      # NOTE(review): assumes a go.mod at the repository root - confirm.
+      - name: Set up Go
+        uses: actions/setup-go@v5
+        with:
+          go-version-file: go.mod
+
+      - name: Configure AWS credentials
+        # v1 is deprecated (Node 12 runtime); v4 takes the same OIDC inputs.
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_SERVICE_STAGING }}:role/iac-max-role
+          aws-region: us-west-1
+          role-session-name: github-actions
+
+      - name: Extract the version
+        run: |
+          PARAMS_FILE="./params/version.go"
+          VERSION_MAJOR=$(awk -F= '/VersionMajor/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
+          VERSION_MINOR=$(awk -F= '/VersionMinor/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
+          VERSION_PATCH=$(awk -F= '/VersionPatch/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
+          VERSION_META=$(awk -F\" '/VersionMeta/ {print $2; exit}' $PARAMS_FILE)
+
+          # Construct the full version string
+          VERSION="$VERSION_MAJOR.$VERSION_MINOR.$VERSION_PATCH"
+          if [ "$VERSION_META" != "stable" ]; then
+            VERSION+="-${VERSION_META}"
+          fi
+
+          echo "Version extracted: $VERSION"
+          echo "VERSION=$VERSION" >> $GITHUB_ENV
+
+      - name: Build the geth binary
+        # Run steps use "bash -e", so a failed build fails the step on its own.
+        run: |
+          IFS="-" read -r GOOS GOARCH <<< "${{ matrix.platform }}"
+          output_name=./build/bin/geth
+          if [ "$GOOS" = "windows" ]; then
+            output_name+='.exe'
+          fi
+
+          echo "Building for $GOOS/$GOARCH..."
+          env GOOS=$GOOS GOARCH=$GOARCH go build -o $output_name ./cmd/geth
+
+          if [ ! -f "$output_name" ]; then
+            echo "Geth binary not found!"
+            exit 1
+          fi
+
+          # Apply chmod only for non-windows builds
+          if [ "$GOOS" != "windows" ]; then
+            chmod +x "$output_name"
+          fi
+
+      - name: Upload the geth binary to S3
+        run: |
+          export TZ=America/Los_Angeles
+          IFS="-" read -r GOOS GOARCH <<< "${{ matrix.platform }}"
+          TIMESTAMP=$(date +%Y%m%d%H%M%S)
+          COMMIT_HASH=$(git rev-parse --short HEAD)
+          FOLDER_NAME="geth-${{ matrix.platform }}-${VERSION}-${COMMIT_HASH}"
+          ARCHIVE_NAME="${FOLDER_NAME}.tar.gz"
+
+          binary_name=./build/bin/geth
+          if [ "$GOOS" = "windows" ]; then
+            binary_name+='.exe'
+          fi
+
+          # For linux amd64 upload the binary for internal testing
+          if [ "${{ matrix.platform }}" = "linux-amd64" ]; then
+            echo "Uploading binary for internal use..."
+            aws s3 cp $binary_name s3://$S3_BUCKET/geth/geth-$TIMESTAMP --quiet
+
+            # Update manifest file for linux-amd64 builds only
+            aws s3 cp s3://$S3_BUCKET/geth/manifest.txt manifest.txt --quiet || touch manifest.txt
+            echo "$TIMESTAMP" >> manifest.txt
+            aws s3 cp manifest.txt s3://$S3_BUCKET/geth/manifest.txt --quiet
+          fi
+
+          mkdir $FOLDER_NAME
+          mv $binary_name $FOLDER_NAME/
+
+          echo "Archiving the geth binary..."
+          tar -czvf $ARCHIVE_NAME $FOLDER_NAME
+
+          echo "Uploading $ARCHIVE_NAME to S3..."
+          aws s3 cp $ARCHIVE_NAME s3://$S3_BUCKET/geth-public/$ARCHIVE_NAME --quiet
+
+  # Trim old objects so the bucket does not grow without bound
+  cleanup:
+    runs-on: ubuntu-latest
+    needs: build_and_push
+    steps:
+      - name: Configure AWS credentials
+        # Same role as build_and_push - do not hardcode the AWS account id.
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_SERVICE_STAGING }}:role/iac-max-role
+          aws-region: us-west-1
+          role-session-name: github-actions
+
+      - name: Cleanup old binaries
+        run: |
+          # Keep the newest $2 objects under prefix $1, delete the rest.
+          # NOTE(review): list-objects-v2 caps at 1000 keys per call; fine
+          # while each prefix stays below that - confirm before it grows.
+          cleanup_s3() {
+            PREFIX=$1
+            KEEP=$2
+
+            echo "Cleaning up in bucket $S3_BUCKET with prefix: $PREFIX, keeping latest $KEEP binaries"
+
+            aws s3api list-objects-v2 --bucket $S3_BUCKET --prefix $PREFIX --query "sort_by(Contents,&LastModified)[*].Key" > all_binaries.json
+
+            # Drop the newest $KEEP keys; ". // []" guards against the null
+            # the query emits when the prefix holds no objects at all.
+            BINARIES_TO_DELETE=$(jq -r ". // [] | .[0:-${KEEP}][]" all_binaries.json)
+
+            if [ -n "$BINARIES_TO_DELETE" ]; then
+              # Delete old binaries
+              for key in $BINARIES_TO_DELETE; do
+                aws s3 rm s3://$S3_BUCKET/$key --quiet
+              done
+              echo "Deleted old binaries: $BINARIES_TO_DELETE"
+            else
+              echo "No old binaries to delete."
+            fi
+          }
+
+          # Cleanup internal geth binaries
+          cleanup_s3 "geth/" "${NUM_INTERNAL_BINARIES_TO_KEEP}"
+
+          # Cleanup public geth binaries
+          cleanup_s3 "geth-public/" "${NUM_PUBLIC_BINARIES_TO_KEEP}"