
feat(core/vm): adds eip-7212 precompile (#13) #49

Workflow file for this run

name: Build and Upload geth Binary to S3

on:
  push:
    branches:
      - main
    paths-ignore:
      - '.github/**'
  pull_request:
    branches:
      - main
    paths-ignore:
      - '.github/**'
  workflow_dispatch:

permissions:
  id-token: write
  contents: write
  pull-requests: write
  actions: write

env:
  NUM_INTERNAL_BINARIES_TO_KEEP: 50
  NUM_PUBLIC_BINARIES_TO_KEEP: 400
  S3_BUCKET: story-geth-binaries

jobs:
  # Add timestamp
  Timestamp:
    uses: storyprotocol/gha-workflows/.github/workflows/reusable-timestamp.yml@main

  # Build and upload the geth binary
  build_and_push:
    needs: Timestamp
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform:
          [linux-amd64, linux-arm64, darwin-amd64, darwin-arm64, windows-amd64]
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_SERVICE_STAGING }}:role/iac-max-role
          aws-region: us-west-1
          role-session-name: github-actions

      - name: Extract the version
        run: |
          PARAMS_FILE="./params/version.go"
          VERSION_MAJOR=$(awk -F= '/VersionMajor/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_MINOR=$(awk -F= '/VersionMinor/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_PATCH=$(awk -F= '/VersionPatch/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_META=$(awk -F\" '/VersionMeta/ {print $2; exit}' $PARAMS_FILE)

          # Construct the full version string
          VERSION="$VERSION_MAJOR.$VERSION_MINOR.$VERSION_PATCH"
          if [ "$VERSION_META" != "stable" ]; then
            VERSION+="-${VERSION_META}"
          fi

          echo "Version extracted: $VERSION"
          echo "VERSION=$VERSION" >> $GITHUB_ENV
          echo "VERSION_META=$VERSION_META" >> $GITHUB_ENV
      - name: Build the geth binary
        run: |
          IFS="-" read -r GOOS GOARCH <<< "${{ matrix.platform }}"
          output_name=./build/bin/geth
          if [ "$GOOS" = "windows" ]; then
            output_name+='.exe'
          fi

          echo "Building for $GOOS/$GOARCH..."
          env GOOS=$GOOS GOARCH=$GOARCH go build -o $output_name ./cmd/geth
          if [ $? -ne 0 ]; then
            echo "Build failed!"
            exit 1
          fi

          if [ ! -f "$output_name" ]; then
            echo "Geth binary not found!"
            exit 1
          fi

          # Apply chmod only for non-windows builds
          if [ "$GOOS" != "windows" ]; then
            chmod +x "$output_name"
          fi
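          # Each matrix value is a single "GOOS-GOARCH" string, so the
          # IFS="-" read above splits it into the two variables go build
          # needs. Reproducing one matrix entry by hand (darwin-arm64 is just
          # an illustrative pick from the matrix above):
          #
          #   platform="darwin-arm64"
          #   IFS="-" read -r GOOS GOARCH <<< "$platform"
          #   echo "$GOOS $GOARCH"   # -> darwin arm64
          #   env GOOS="$GOOS" GOARCH="$GOARCH" go build -o ./build/bin/geth ./cmd/geth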
      - name: Upload the geth binary to S3
        run: |
          export TZ=America/Los_Angeles
          IFS="-" read -r GOOS GOARCH <<< "${{ matrix.platform }}"
          TIMESTAMP=$(date +%Y%m%d%H%M%S)
          echo "Timestamp: $TIMESTAMP"
          HUMAN_READABLE_VERSION=$(date)
          COMMIT_HASH=$(git rev-parse --short HEAD)
          FOLDER_NAME="geth-${{ matrix.platform }}-${VERSION}-${COMMIT_HASH}"
          ARCHIVE_NAME="${FOLDER_NAME}.tar.gz"
          PUBLIC_DOWNLOAD_URL="https://$S3_BUCKET.s3.us-west-1.amazonaws.com/geth-public/$ARCHIVE_NAME"

          binary_name=./build/bin/geth
          if [ "$GOOS" = "windows" ]; then
            binary_name+='.exe'
          fi

          # For linux-amd64, upload the binary for internal testing
          if [ "${{ matrix.platform }}" = "linux-amd64" ]; then
            echo "Uploading binary for internal use..."
            aws s3 cp $binary_name s3://$S3_BUCKET/geth/geth-$TIMESTAMP --quiet

            # Update manifest file for linux-amd64 builds only
            aws s3 cp s3://$S3_BUCKET/geth/manifest.txt manifest.txt --quiet || touch manifest.txt
            echo "$TIMESTAMP" >> manifest.txt
            aws s3 cp manifest.txt s3://$S3_BUCKET/geth/manifest.txt --quiet

            # Update version file
            aws s3 cp s3://$S3_BUCKET/geth-public/version.txt version.txt --quiet || printf "File Name\t\t\tVersion\t\t\t\tCommit Hash\t\tTimestamp\n" > version.txt
if [ "${VERSION_META}" != "stable" ]; then
printf "$VERSION-$COMMIT_HASH\t\t$VERSION\t\t\t$COMMIT_HASH\t\t\t$TIMESTAMP\n" >> version.txt
else
printf "$VERSION-$COMMIT_HASH\t\t\t$VERSION\t\t\t\t$COMMIT_HASH\t\t\t$TIMESTAMP\n" >> version.txt
fi
aws s3 cp version.txt s3://$S3_BUCKET/geth-public/version.txt --quiet
fi
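          # version.txt is a tab-separated index of published builds; a row
          # for a hypothetical stable build would look roughly like:
          #
          #   1.0.0-abc1234          1.0.0          abc1234          20240101120000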
          mkdir $FOLDER_NAME
          mv $binary_name $FOLDER_NAME/

          echo "Archiving the geth binary..."
          tar -czvf $ARCHIVE_NAME $FOLDER_NAME
          if [ $? -ne 0 ]; then
            echo "Failed to create the archive: $ARCHIVE_NAME"
            exit 1
          fi

          echo "Uploading $ARCHIVE_NAME to S3..."
          aws s3 cp $ARCHIVE_NAME s3://$S3_BUCKET/geth-public/$ARCHIVE_NAME --quiet
          if [ $? -ne 0 ]; then
            echo "Failed to upload $ARCHIVE_NAME to S3!"
            exit 1
          fi

          echo "COMMIT_HASH=$COMMIT_HASH" >> $GITHUB_ENV
          echo "PUBLIC_DOWNLOAD_URL=$PUBLIC_DOWNLOAD_URL" >> $GITHUB_ENV
      - name: Add binary version back to PR
        if: matrix.platform == 'linux-amd64'
        uses: mshick/add-pr-comment@v2
        with:
          message: |
            ### Binary uploaded successfully 🎉
            📦 **Version Name:** ${{ env.VERSION }}-${{ env.COMMIT_HASH }}
            📦 **Download Source:** [AWS S3](${{ env.PUBLIC_DOWNLOAD_URL }})

  cleanup:
    runs-on: ubuntu-latest
    needs: build_and_push
    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          role-to-assume: arn:aws:iam::478656756051:role/iac-max-role
          aws-region: us-west-1
          role-session-name: github-actions

      - name: Cleanup old binaries
        run: |
          cleanup_s3() {
            PREFIX=$1
            KEEP=$2
            echo "Cleaning up in bucket $S3_BUCKET with prefix: $PREFIX, keeping latest $KEEP binaries"
            aws s3api list-objects-v2 --bucket $S3_BUCKET --prefix $PREFIX --query "sort_by(Contents,&LastModified)[*].Key" > all_binaries.json

            # Select the keys to delete: everything except the newest $KEEP binaries
            BINARIES_TO_DELETE=$(jq -r ".[0:-${KEEP}][]" all_binaries.json)

            if [ -n "$BINARIES_TO_DELETE" ]; then
              # Delete old binaries
              for key in $BINARIES_TO_DELETE; do
                aws s3 rm s3://$S3_BUCKET/$key --quiet
              done
              echo "Deleted old binaries: $BINARIES_TO_DELETE"
            else
              echo "No old binaries to delete."
            fi
          }

          # Cleanup internal geth binaries
          cleanup_s3 "geth/" "${NUM_INTERNAL_BINARIES_TO_KEEP}"

          # Cleanup public geth binaries
          cleanup_s3 "geth-public/" "${NUM_PUBLIC_BINARIES_TO_KEEP}"