# feat: Automated Preset Docker Image Building (#38)
---
# Builds and pushes preset model images to ACR whenever a preset's source
# path changes (PR or push to main), or on manual dispatch.
name: Build and Push Preset Models

on:
  pull_request:
    branches:
      - main
    paths:
      - 'pkg/presets/falcon/**'
      - 'pkg/presets/llama-2/**'
      - 'pkg/presets/llama-2-chat/**'
  push:
    branches:
      - main
    paths:
      - 'pkg/presets/falcon/**'
      - 'pkg/presets/llama-2/**'
      - 'pkg/presets/llama-2-chat/**'
  workflow_dispatch:
    inputs:
      release:
        description: 'Release (yes/no)'
        required: true
        default: 'no'
      image_tag:
        description: 'Image Tag'
        required: false

permissions:
  id-token: write  # required for OIDC federated login via azure/login
  contents: read
jobs:
  # Determines which preset paths changed in the last commit and computes
  # the image tag used by all downstream build jobs.
  setup:
    runs-on: ubuntu-20.04
    outputs:
      image_tag: ${{ steps.set_tag.outputs.image_tag }}
      FALCON_MODIFIED: ${{ steps.check_modified_paths.outputs.FALCON_MODIFIED }}
      LLAMA2_MODIFIED: ${{ steps.check_modified_paths.outputs.LLAMA2_MODIFIED }}
      LLAMA2_CHAT_MODIFIED: ${{ steps.check_modified_paths.outputs.LLAMA2_CHAT_MODIFIED }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          # Full history so HEAD^ exists for the diff below.
          fetch-depth: 0

      - name: Check Modified files
        run: |
          # List files changed by the last commit. For pull_request events the
          # checked-out HEAD is the PR merge commit, so HEAD^..HEAD covers the
          # PR's changes relative to the base branch.
          files=$(git diff --name-only HEAD^ HEAD)
          echo "Modified files: $files"
          FILES_MODIFIED=""
          while IFS= read -r file; do
            trimmed_file=$(echo "$file" | tr -d '[:space:]')
            echo "Trimmed file: $trimmed_file"
            FILES_MODIFIED="${FILES_MODIFIED}${trimmed_file};"
          done <<< "$files"
          echo "FILES_MODIFIED=${FILES_MODIFIED}" >> "$GITHUB_ENV"

      - name: Check Modified Paths
        id: check_modified_paths
        run: |
          FALCON_MODIFIED=false
          LLAMA2_MODIFIED=false
          LLAMA2_CHAT_MODIFIED=false
          IFS=';' read -ra ADDR <<< "$FILES_MODIFIED"
          for file in "${ADDR[@]}"; do
            echo "Checking file: $file"
            if [[ "$file" == pkg/presets/falcon/* ]]; then
              echo "File matches falcon path: $file"
              FALCON_MODIFIED=true
            elif [[ "$file" == pkg/presets/llama-2/* ]]; then
              echo "File matches llama-2 path: $file"
              LLAMA2_MODIFIED=true
            elif [[ "$file" == pkg/presets/llama-2-chat/* ]]; then
              echo "File matches llama-2-chat path: $file"
              LLAMA2_CHAT_MODIFIED=true
            else
              echo "File does not match any paths: $file"
            fi
          done
          # ::set-output is deprecated/disabled; write step outputs via
          # the GITHUB_OUTPUT environment file instead.
          {
            echo "FALCON_MODIFIED=$FALCON_MODIFIED"
            echo "LLAMA2_MODIFIED=$LLAMA2_MODIFIED"
            echo "LLAMA2_CHAT_MODIFIED=$LLAMA2_CHAT_MODIFIED"
          } >> "$GITHUB_OUTPUT"

      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}

      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

      - name: Set Image Tag
        id: set_tag
        run: |
          # Manual runs may pin an explicit tag; otherwise tag with short SHA.
          if [[ "${{ github.event_name }}" == "workflow_dispatch" && -n "${{ github.event.inputs.image_tag }}" ]]; then
            echo "image_tag=${{ github.event.inputs.image_tag }}" >> "$GITHUB_OUTPUT"
          else
            echo "image_tag=$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT"
          fi

      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
falcon:
needs: setup
runs-on: ubuntu-20.04
if: ${{ needs.setup.outputs.FALCON_MODIFIED }} == 'true'
steps:
- name: Build and push Falcon model
run: |
cd docker/presets/falcon
az acr build -t aimodelsregistry.azurecr.io/falcon:${{ needs.setup.outputs.image_tag }} -r aimodelsregistry .
llama-2-7b:
needs: setup
runs-on: ubuntu-20.04
if: ${{ needs.setup.outputs.LLAMA2_MODIFIED }} == 'true'
steps:
- name: Build and push Llama model
run: |
az acr build --build-arg LLAMA_VERSION=llama-2-7b --build-arg SRC_DIR=pkg/presets/llama-2 -t aimodelsregistry.azurecr.io/llama-2-7b:${{ needs.setup.outputs.image_tag }} -r aimodelsregistry .
llama-2-13b:
needs: setup
runs-on: ubuntu-20.04
if: ${{ needs.setup.outputs.LLAMA2_MODIFIED }} == 'true'
steps:
- name: Build and push Llama model
run: |
az acr build --build-arg LLAMA_VERSION=llama-2-13b --build-arg SRC_DIR=pkg/presets/llama-2 -t aimodelsregistry.azurecr.io/llama-2-13b:${{ needs.setup.outputs.image_tag }} -r aimodelsregistry .
llama-2-70b:
needs: setup
runs-on: ubuntu-20.04
if: ${{ needs.setup.outputs.LLAMA2_MODIFIED }} == 'true'
steps:
- name: Build and push Llama model
run: |
az acr build --build-arg LLAMA_VERSION=llama-2-70b --build-arg SRC_DIR=pkg/presets/llama-2 -t aimodelsregistry.azurecr.io/llama-2-70b:${{ needs.setup.outputs.image_tag }} -r aimodelsregistry .
llama-2-7b-chat:
needs: setup
runs-on: ubuntu-20.04
if: ${{ needs.setup.outputs.LLAMA2_CHAT_MODIFIED }} == 'true'
steps:
- name: Build and push Llama chat model
run: |
az acr build --build-arg LLAMA_VERSION=llama-2-7b-chat --build-arg SRC_DIR=pkg/presets/llama-2-chat -t aimodelsregistry.azurecr.io/llama-2-7b-chat:${{ needs.setup.outputs.image_tag }} -r aimodelsregistry .
llama-2-13b-chat:
needs: setup
runs-on: ubuntu-20.04
if: ${{ needs.setup.outputs.LLAMA2_CHAT_MODIFIED }} == 'true'
steps:
- name: Build and push Llama chat model
run: |
az acr build --build-arg LLAMA_VERSION=llama-2-13b-chat --build-arg SRC_DIR=pkg/presets/llama-2-chat -t aimodelsregistry.azurecr.io/llama-2-13b-chat:${{ needs.setup.outputs.image_tag }} -r aimodelsregistry .
llama-2-70b-chat:
needs: setup
runs-on: ubuntu-20.04
if: ${{ needs.setup.outputs.LLAMA2_CHAT_MODIFIED }} == 'true'
steps:
- name: Build and push Llama chat model
run: |
az acr build --build-arg LLAMA_VERSION=llama-2-70b-chat --build-arg SRC_DIR=pkg/presets/llama-2-chat -t aimodelsregistry.azurecr.io/llama-2-70b-chat:${{ needs.setup.outputs.image_tag }} -r aimodelsregistry .