feat: Automated Preset Docker Image Building #78

name: Build and Push Preset Models
on:
  # For Testing
  pull_request:
    branches:
      - main
    paths:
      - 'pkg/presets/falcon/**'
      - 'pkg/presets/llama-2/**'
      - 'pkg/presets/llama-2-chat/**'
  push:
    branches:
      - main
    paths:
      - 'pkg/presets/falcon/**'
      - 'pkg/presets/llama-2/**'
      - 'pkg/presets/llama-2-chat/**'
  workflow_dispatch:
    inputs:
      release:
        description: 'Release (yes/no)'
        required: true
        default: 'no'
      image_tag:
        description: 'Image Tag'
        required: false
permissions:
  id-token: write
  contents: read
jobs:
  setup:
    runs-on: ubuntu-20.04
    outputs:
      image_tag: ${{ steps.set_tag.outputs.image_tag }}
      FALCON_MODIFIED: ${{ steps.check_modified_paths.outputs.FALCON_MODIFIED }}
      LLAMA2_MODIFIED: ${{ steps.check_modified_paths.outputs.LLAMA2_MODIFIED }}
      LLAMA2_CHAT_MODIFIED: ${{ steps.check_modified_paths.outputs.LLAMA2_CHAT_MODIFIED }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Get Modified Files
        run: |
          files=$(git diff --name-only HEAD^ HEAD)
          echo "Modified files: $files"
          FILES_MODIFIED=""
          while IFS= read -r file; do
            trimmed_file=$(echo "$file" | tr -d '[:space:]')
            echo "Trimmed file: $trimmed_file"
            FILES_MODIFIED="${FILES_MODIFIED}${trimmed_file};"
          done <<< "$files"
          echo "FILES_MODIFIED=${FILES_MODIFIED}" >> $GITHUB_ENV
      - name: Check Modified Paths
        id: check_modified_paths
        run: |
          FALCON_MODIFIED=false
          LLAMA2_MODIFIED=false
          LLAMA2_CHAT_MODIFIED=false
          IFS=';' read -ra ADDR <<< "$FILES_MODIFIED"
          for file in "${ADDR[@]}"; do
            echo "Checking file: $file"
            if [[ "$file" == pkg/presets/falcon/* ]] && [[ "$FALCON_MODIFIED" == false ]]; then
              echo "File matches falcon path: $file"
              FALCON_MODIFIED=true
            elif [[ "$file" == pkg/presets/llama-2/* ]] && [[ "$LLAMA2_MODIFIED" == false ]]; then
              echo "File matches llama-2 path: $file"
              LLAMA2_MODIFIED=true
            elif [[ "$file" == pkg/presets/llama-2-chat/* ]] && [[ "$LLAMA2_CHAT_MODIFIED" == false ]]; then
              echo "File matches llama-2-chat path: $file"
              LLAMA2_CHAT_MODIFIED=true
            else
              echo "File does not match any paths: $file"
            fi
          done
          echo "FALCON_MODIFIED=$FALCON_MODIFIED" >> $GITHUB_OUTPUT
          echo "LLAMA2_MODIFIED=$LLAMA2_MODIFIED" >> $GITHUB_OUTPUT
          echo "LLAMA2_CHAT_MODIFIED=$LLAMA2_CHAT_MODIFIED" >> $GITHUB_OUTPUT
      - name: Images to Build
        run: |
          echo "FALCON_MODIFIED for this job: ${{ steps.check_modified_paths.outputs.FALCON_MODIFIED }}"
          echo "LLAMA2_MODIFIED for this job: ${{ steps.check_modified_paths.outputs.LLAMA2_MODIFIED }}"
          echo "LLAMA2_CHAT_MODIFIED for this job: ${{ steps.check_modified_paths.outputs.LLAMA2_CHAT_MODIFIED }}"
      - name: Set Image Tag
        id: set_tag
        run: |
          if [[ "${{ github.event_name }}" == "workflow_dispatch" && -n "${{ github.event.inputs.image_tag }}" ]]; then
            echo "Using workflow dispatch to set image tag"
            echo "image_tag=${{ github.event.inputs.image_tag }}" >> $GITHUB_OUTPUT
          else
            echo "Setting image tag based on latest commit"
            echo "image_tag=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
          fi
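      # Tag resolution (sketch; the values shown are examples): a manual run that supplies
      # image_tag tags every image with that value; otherwise the short SHA of the triggering
      # commit is used, e.g.
      #   git rev-parse --short HEAD   # -> something like "3f2c1ab"
      # so images built from pushes to main stay traceable to a commit.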
      - name: Print Image Tag
        run: |
          echo "image_tag for this job: ${{ steps.set_tag.outputs.image_tag }}"
  falcon-7b:
    needs: setup
    runs-on: ubuntu-20.04
    if: ${{ needs.setup.outputs.FALCON_MODIFIED == 'true' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
      - name: Build and push Falcon model
        run: |
          az acr build -t aimodelsregistry.azurecr.io/falcon-7b:${{ needs.setup.outputs.image_tag }} \
            -r aimodelsregistry \
            -f docker/presets/falcon/Dockerfile \
            --build-arg FALCON_MODEL_NAME=tiiuae/falcon-7b \
            .
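  # Rough local equivalent of the build step above (a sketch, not part of the workflow;
  # assumes Docker and permission to push to the registry):
  #   docker build -f docker/presets/falcon/Dockerfile \
  #     --build-arg FALCON_MODEL_NAME=tiiuae/falcon-7b \
  #     -t aimodelsregistry.azurecr.io/falcon-7b:<image_tag> .
  #   docker push aimodelsregistry.azurecr.io/falcon-7b:<image_tag>
  # az acr build performs both steps server-side in ACR.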
  falcon-7b-instruct:
    needs: setup
    runs-on: ubuntu-20.04
    if: ${{ needs.setup.outputs.FALCON_MODIFIED == 'true' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
      - name: Build and push Falcon model
        run: |
          az acr build -t aimodelsregistry.azurecr.io/falcon-7b-instruct:${{ needs.setup.outputs.image_tag }} \
            -r aimodelsregistry \
            -f docker/presets/falcon/Dockerfile \
            --build-arg FALCON_MODEL_NAME=tiiuae/falcon-7b-instruct \
            .
  falcon-40b:
    needs: setup
    runs-on: ubuntu-20.04
    if: ${{ needs.setup.outputs.FALCON_MODIFIED == 'true' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
      - name: Build and push Falcon model
        run: |
          az acr build -t aimodelsregistry.azurecr.io/falcon-40b:${{ needs.setup.outputs.image_tag }} \
            -r aimodelsregistry \
            -f docker/presets/falcon/Dockerfile \
            --build-arg FALCON_MODEL_NAME=tiiuae/falcon-40b \
            .
  falcon-40b-instruct:
    needs: setup
    runs-on: ubuntu-20.04
    if: ${{ needs.setup.outputs.FALCON_MODIFIED == 'true' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
      - name: Build and push Falcon model
        run: |
          az acr build -t aimodelsregistry.azurecr.io/falcon-40b-instruct:${{ needs.setup.outputs.image_tag }} \
            -r aimodelsregistry \
            -f docker/presets/falcon/Dockerfile \
            --build-arg FALCON_MODEL_NAME=tiiuae/falcon-40b-instruct \
            .
  llama-2-7b:
    needs: setup
    runs-on: ubuntu-20.04
    if: ${{ needs.setup.outputs.LLAMA2_MODIFIED == 'true' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
      - name: Build and push Llama model
        run: |
          az acr build \
            --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \
            --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \
            --build-arg LLAMA_VERSION=llama-2-7b \
            --build-arg SRC_DIR=pkg/presets/llama-2 \
            --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \
            -t aimodelsregistry.azurecr.io/llama-2-7b:${{ needs.setup.outputs.image_tag }} \
            -r aimodelsregistry \
            -f docker/presets/llama-2/Dockerfile \
            .
  llama-2-13b:
    needs: setup
    runs-on: ubuntu-20.04
    if: ${{ needs.setup.outputs.LLAMA2_MODIFIED == 'true' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
      - name: Build and push Llama model
        run: |
          az acr build \
            --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \
            --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \
            --build-arg LLAMA_VERSION=llama-2-13b \
            --build-arg SRC_DIR=pkg/presets/llama-2 \
            --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \
            -t aimodelsregistry.azurecr.io/llama-2-13b:${{ needs.setup.outputs.image_tag }} \
            -r aimodelsregistry \
            -f docker/presets/llama-2/Dockerfile \
            .
  llama-2-70b:
    needs: setup
    runs-on: ubuntu-20.04
    if: ${{ needs.setup.outputs.LLAMA2_MODIFIED == 'true' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
      - name: Build and push Llama model
        run: |
          az acr build \
            --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \
            --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \
            --build-arg LLAMA_VERSION=llama-2-70b \
            --build-arg SRC_DIR=pkg/presets/llama-2 \
            --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \
            -t aimodelsregistry.azurecr.io/llama-2-70b:${{ needs.setup.outputs.image_tag }} \
            -r aimodelsregistry \
            -f docker/presets/llama-2/Dockerfile \
            .
  llama-2-7b-chat:
    needs: setup
    runs-on: ubuntu-20.04
    if: ${{ needs.setup.outputs.LLAMA2_CHAT_MODIFIED == 'true' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
      - name: Build and push Llama chat model
        run: |
          az acr build \
            --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \
            --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \
            --build-arg LLAMA_VERSION=llama-2-7b-chat \
            --build-arg SRC_DIR=pkg/presets/llama-2-chat \
            --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \
            -t aimodelsregistry.azurecr.io/llama-2-7b-chat:${{ needs.setup.outputs.image_tag }} \
            -r aimodelsregistry \
            -f docker/presets/llama-2/Dockerfile \
            .
  llama-2-13b-chat:
    needs: setup
    runs-on: ubuntu-20.04
    if: ${{ needs.setup.outputs.LLAMA2_CHAT_MODIFIED == 'true' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
      - name: Build and push Llama chat model
        run: |
          az acr build \
            --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \
            --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \
            --build-arg LLAMA_VERSION=llama-2-13b-chat \
            --build-arg SRC_DIR=pkg/presets/llama-2-chat \
            --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \
            -t aimodelsregistry.azurecr.io/llama-2-13b-chat:${{ needs.setup.outputs.image_tag }} \
            -r aimodelsregistry \
            -f docker/presets/llama-2/Dockerfile \
            .
  llama-2-70b-chat:
    needs: setup
    runs-on: ubuntu-20.04
    if: ${{ needs.setup.outputs.LLAMA2_CHAT_MODIFIED == 'true' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
      - name: Az CLI login
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }}
          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
      - name: Install Azure CLI latest
        run: |
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
      - name: 'Login to ACR'
        run: az acr login --name aimodelsregistry
      - name: Build and push Llama chat model
        run: |
          az acr build \
            --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \
            --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \
            --build-arg LLAMA_VERSION=llama-2-70b-chat \
            --build-arg SRC_DIR=pkg/presets/llama-2-chat \
            --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \
            -t aimodelsregistry.azurecr.io/llama-2-70b-chat:${{ needs.setup.outputs.image_tag }} \
            -r aimodelsregistry \
            -f docker/presets/llama-2/Dockerfile \
            .
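# Manual trigger (a sketch using the GitHub CLI; the input values are examples):
#   gh workflow run "Build and Push Preset Models" -f release=no -f image_tag=v0.1.0
# Omitting image_tag makes the jobs fall back to tagging images with the short commit SHA.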