# feat: Automated Preset Docker Image Building (#90)
name: Build and Push Preset Models

on:
  # For testing: PRs touching a preset also exercise the build path.
  pull_request:
    branches:
      - main
    paths:
      - 'pkg/presets/falcon/**'
      - 'pkg/presets/llama-2/**'
      - 'pkg/presets/llama-2-chat/**'
  push:
    branches:
      - main
    paths:
      - 'pkg/presets/falcon/**'
      - 'pkg/presets/llama-2/**'
      - 'pkg/presets/llama-2-chat/**'
  workflow_dispatch:
    inputs:
      release:
        description: 'Release (yes/no)'
        required: true
        default: 'no'
      image_tag:
        description: 'Image Tag'
        required: false

# id-token: write is required for azure/login OIDC federation;
# contents: read is required for actions/checkout.
permissions:
  id-token: write
  contents: read
jobs:
  # Determines which preset paths changed in the last commit and
  # computes the image tag shared by every build in this run.
  setup:
    runs-on: ubuntu-20.04
    outputs:
      image_tag: ${{ steps.set_tag.outputs.image_tag }}
      FALCON_MODIFIED: ${{ steps.check_modified_paths.outputs.FALCON_MODIFIED }}
      LLAMA2_MODIFIED: ${{ steps.check_modified_paths.outputs.LLAMA2_MODIFIED }}
      LLAMA2_CHAT_MODIFIED: ${{ steps.check_modified_paths.outputs.LLAMA2_CHAT_MODIFIED }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          # Full history so HEAD^ exists for the diff below and
          # `git rev-parse --short HEAD` sees the real commit.
          fetch-depth: 0

      - name: Check Available Disk Space
        run: df -h

      # Collect the file paths touched by the last commit into a
      # semicolon-separated FILES_MODIFIED env var. The read loop
      # (rather than word-splitting) keeps one path per iteration.
      - name: Get Modified files
        run: |
          files=$(git diff --name-only HEAD^ HEAD)
          echo "Modified files: $files"
          FILES_MODIFIED=""
          while IFS= read -r file; do
            trimmed_file=$(echo "$file" | tr -d '[:space:]')
            echo "Trimmed file: $trimmed_file"
            FILES_MODIFIED="${FILES_MODIFIED}${trimmed_file};"
          done <<< "$files"
          echo "FILES_MODIFIED=${FILES_MODIFIED}" >> "$GITHUB_ENV"

      # Map the modified paths onto one boolean output per preset
      # family; the build job's matrix consumes these.
      - name: Check Modified Paths
        id: check_modified_paths
        run: |
          FALCON_MODIFIED=false
          LLAMA2_MODIFIED=false
          LLAMA2_CHAT_MODIFIED=false
          IFS=';' read -ra ADDR <<< "$FILES_MODIFIED"
          for file in "${ADDR[@]}"; do
            echo "Checking file: $file"
            if [[ "$file" == pkg/presets/falcon/* ]] && [[ "$FALCON_MODIFIED" == false ]]; then
              echo "File matches falcon path: $file"
              FALCON_MODIFIED=true
            elif [[ "$file" == pkg/presets/llama-2/* ]] && [[ "$LLAMA2_MODIFIED" == false ]]; then
              echo "File matches llama-2 path: $file"
              LLAMA2_MODIFIED=true
            elif [[ "$file" == pkg/presets/llama-2-chat/* ]] && [[ "$LLAMA2_CHAT_MODIFIED" == false ]]; then
              echo "File matches llama-2-chat path: $file"
              LLAMA2_CHAT_MODIFIED=true
            else
              echo "File does not match any paths: $file"
            fi
          done
          echo "FALCON_MODIFIED=$FALCON_MODIFIED" >> "$GITHUB_OUTPUT"
          echo "LLAMA2_MODIFIED=$LLAMA2_MODIFIED" >> "$GITHUB_OUTPUT"
          echo "LLAMA2_CHAT_MODIFIED=$LLAMA2_CHAT_MODIFIED" >> "$GITHUB_OUTPUT"

      - name: Images to Build
        run: |
          echo "FALCON_MODIFIED for this job: ${{ steps.check_modified_paths.outputs.FALCON_MODIFIED }}"
          echo "LLAMA2_MODIFIED for this job: ${{ steps.check_modified_paths.outputs.LLAMA2_MODIFIED }}"
          echo "LLAMA2_CHAT_MODIFIED for this job: ${{ steps.check_modified_paths.outputs.LLAMA2_CHAT_MODIFIED }}"

      # Tag precedence: explicit workflow_dispatch input, else the
      # short SHA of the triggering commit.
      - name: Set Image Tag
        id: set_tag
        run: |
          if [[ "${{ github.event_name }}" == "workflow_dispatch" && -n "${{ github.event.inputs.image_tag }}" ]]; then
            echo "Using workflow dispatch to set image tag"
            echo "image_tag=${{ github.event.inputs.image_tag }}" >> "$GITHUB_OUTPUT"
          else
            echo "Setting image tag based on latest commit"
            echo "image_tag=$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT"
          fi

      - name: Print Image Tag
        run: |
          echo "image_tag for this job: ${{ steps.set_tag.outputs.image_tag }}"
build: | |
needs: setup | |
runs-on: ubuntu-20.04 | |
strategy: | |
fail-fast: false | |
matrix: | |
model: | |
- name: falcon-7b | |
dockerfile: docker/presets/falcon/Dockerfile | |
model_id: tiiuae/falcon-7b | |
build_args: "--build-arg FALCON_MODEL_NAME=tiiuae/falcon-7b" | |
- name: falcon-7b-instruct | |
dockerfile: docker/presets/falcon/Dockerfile | |
model_id: tiiuae/falcon-7b-instruct | |
build_args: "--build-arg FALCON_MODEL_NAME=tiiuae/falcon-7b-instruct" | |
- name: llama-2-7b | |
path: docker/presets/llama-2/Dockerfile | |
build_args: "--build-arg EXTERNAL_IP=__EXTERNAL_IP__ --build-arg EXTERNAL_PORT=__EXTERNAL_PORT__ --build-arg LLAMA_VERSION=llama-2-7b --build-arg SRC_DIR=pkg/presets/llama-2 --build-arg WEB_SERVER_AUTH_TOKEN=__WEB_SERVER_AUTH_TOKEN__" | |
- name: llama-2-7b-chat | |
path: docker/presets/llama-2/Dockerfile | |
build_args: "--build-arg EXTERNAL_IP=__EXTERNAL_IP__ --build-arg EXTERNAL_PORT=__EXTERNAL_PORT__ --build-arg LLAMA_VERSION=llama-2-7b-chat --build-arg SRC_DIR=pkg/presets/llama-2 --build-arg WEB_SERVER_AUTH_TOKEN=__WEB_SERVER_AUTH_TOKEN__" | |
include: | |
- name: falcon-7b | |
if: ${{ needs.setup.outputs.FALCON_MODIFIED == 'true' }} | |
- name: falcon-7b-instruct | |
if: ${{ needs.setup.outputs.FALCON_MODIFIED == 'true' }} | |
- name: llama-2-7b | |
if: ${{ needs.setup.outputs.LLAMA2_MODIFIED == 'true' }} | |
- name: llama-2-7b-chat | |
if: ${{ needs.setup.outputs.LLAMA2_CHAT_MODIFIED == 'true' }} | |
steps: | |
- name: Checkout | |
uses: actions/checkout@v4 | |
with: | |
submodules: true | |
fetch-depth: 0 | |
- name: Az CLI login | |
uses: azure/login@v1 | |
with: | |
client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
- name: Install Azure CLI latest | |
run: | | |
curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
- name: 'Login to ACR' | |
run: az acr login --name aimodelsregistry | |
- name: Build and push model | |
run: | | |
BUILD_ARGS="${{ matrix.model.build_args }}" | |
BUILD_ARGS=${BUILD_ARGS/__EXTERNAL_IP__/${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }}} | |
BUILD_ARGS=${BUILD_ARGS/__EXTERNAL_PORT__/${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }}} | |
BUILD_ARGS=${BUILD_ARGS/__WEB_SERVER_AUTH_TOKEN__/${{ secrets.WEB_SERVER_AUTH_TOKEN }}} | |
echo "Docker BUILD_ARGS: $BUILD_ARGS" | |
az acr build \ | |
$BUILD_ARGS \ | |
-t aimodelsregistry.azurecr.io/${{ matrix.model.name }}:${{ needs.setup.outputs.image_tag }} \ | |
-r aimodelsregistry \ | |
-f ${{ matrix.model.dockerfile }} \ | |
. | |
# falcon-7b: | |
# needs: setup | |
# runs-on: ubuntu-20.04 | |
# if: ${{ needs.setup.outputs.FALCON_MODIFIED == 'true'}} | |
# steps: | |
# - name: Checkout | |
# uses: actions/checkout@v4 | |
# with: | |
# submodules: true | |
# fetch-depth: 0 | |
# - name: Az CLI login | |
# uses: azure/login@v1 | |
# with: | |
# client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
# tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
# subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
# - name: Install Azure CLI latest | |
# run: | | |
# curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
# - name: 'Login to ACR' | |
# run: az acr login --name aimodelsregistry | |
# - name: Build and push Falcon model | |
# run: | | |
# az acr build -t aimodelsregistry.azurecr.io/falcon-7b:${{ needs.setup.outputs.image_tag }} \ | |
# -r aimodelsregistry \ | |
# -f docker/presets/falcon/Dockerfile \ | |
# --build-arg FALCON_MODEL_NAME=tiiuae/falcon-7b \ | |
# . | |
# falcon-7b-instruct: | |
# needs: setup | |
# runs-on: ubuntu-20.04 | |
# if: ${{ needs.setup.outputs.FALCON_MODIFIED == 'true'}} | |
# steps: | |
# - name: Checkout | |
# uses: actions/checkout@v4 | |
# with: | |
# submodules: true | |
# fetch-depth: 0 | |
# - name: Az CLI login | |
# uses: azure/login@v1 | |
# with: | |
# client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
# tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
# subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
# - name: Install Azure CLI latest | |
# run: | | |
# curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
# - name: 'Login to ACR' | |
# run: az acr login --name aimodelsregistry | |
# - name: Build and push Falcon model | |
# run: | | |
# az acr build -t aimodelsregistry.azurecr.io/falcon-7b-instruct:${{ needs.setup.outputs.image_tag }} \ | |
# -r aimodelsregistry \ | |
# -f docker/presets/falcon/Dockerfile \ | |
# --build-arg FALCON_MODEL_NAME=tiiuae/falcon-7b-instruct \ | |
# . | |
# falcon-40b: | |
# needs: setup | |
# runs-on: ubuntu-20.04 | |
# if: ${{ needs.setup.outputs.FALCON_MODIFIED == 'true'}} | |
# steps: | |
# - name: Checkout | |
# uses: actions/checkout@v4 | |
# with: | |
# submodules: true | |
# fetch-depth: 0 | |
# - name: Az CLI login | |
# uses: azure/login@v1 | |
# with: | |
# client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
# tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
# subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
# - name: Install Azure CLI latest | |
# run: | | |
# curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
# - name: 'Login to ACR' | |
# run: az acr login --name aimodelsregistry | |
# - name: Build and push Falcon model | |
# run: | | |
# az acr build -t aimodelsregistry.azurecr.io/falcon-40b:${{ needs.setup.outputs.image_tag }} \ | |
# -r aimodelsregistry \ | |
# -f docker/presets/falcon/Dockerfile \ | |
# --build-arg FALCON_MODEL_NAME=tiiuae/falcon-40b \ | |
# . | |
# falcon-40b-instruct: | |
# needs: setup | |
# runs-on: ubuntu-20.04 | |
# if: ${{ needs.setup.outputs.FALCON_MODIFIED == 'true'}} | |
# steps: | |
# - name: Checkout | |
# uses: actions/checkout@v4 | |
# with: | |
# submodules: true | |
# fetch-depth: 0 | |
# - name: Check Available Disk Space | |
# run: df -h | |
# - name: Az CLI login | |
# uses: azure/login@v1 | |
# with: | |
# client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
# tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
# subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
# - name: Install Azure CLI latest | |
# run: | | |
# curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
# - name: 'Login to ACR' | |
# run: az acr login --name aimodelsregistry | |
# - name: Build and push Falcon model | |
# run: | | |
# az acr build -t aimodelsregistry.azurecr.io/falcon-40b-instruct:${{ needs.setup.outputs.image_tag }} \ | |
# -r aimodelsregistry \ | |
# -f docker/presets/falcon/Dockerfile \ | |
# --build-arg FALCON_MODEL_NAME=tiiuae/falcon-40b-instruct \ | |
# . | |
# llama-2-7b: | |
# needs: setup | |
# runs-on: ubuntu-20.04 | |
# if: ${{ needs.setup.outputs.LLAMA2_MODIFIED == 'true'}} | |
# steps: | |
# - name: Checkout | |
# uses: actions/checkout@v4 | |
# with: | |
# submodules: true | |
# fetch-depth: 0 | |
# - name: Az CLI login | |
# uses: azure/login@v1 | |
# with: | |
# client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
# tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
# subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
# - name: Install Azure CLI latest | |
# run: | | |
# curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
# - name: 'Login to ACR' | |
# run: az acr login --name aimodelsregistry | |
# - name: Build and push Llama model | |
# run: | | |
# az acr build \ | |
# --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \ | |
# --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \ | |
# --build-arg LLAMA_VERSION=llama-2-7b \ | |
# --build-arg SRC_DIR=pkg/presets/llama-2 \ | |
# --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \ | |
# -t aimodelsregistry.azurecr.io/llama-2-7b:${{ needs.setup.outputs.image_tag }} \ | |
# -r aimodelsregistry \ | |
# -f docker/presets/llama-2/Dockerfile \ | |
# . | |
# llama-2-13b: | |
# needs: setup | |
# runs-on: ubuntu-20.04 | |
# if: ${{ needs.setup.outputs.LLAMA2_MODIFIED == 'true'}} | |
# steps: | |
# - name: Checkout | |
# uses: actions/checkout@v4 | |
# with: | |
# submodules: true | |
# fetch-depth: 0 | |
# - name: Az CLI login | |
# uses: azure/login@v1 | |
# with: | |
# client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
# tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
# subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
# - name: Install Azure CLI latest | |
# run: | | |
# curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
# - name: 'Login to ACR' | |
# run: az acr login --name aimodelsregistry | |
# - name: Build and push Llama model | |
# run: | | |
# az acr build \ | |
# --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \ | |
# --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \ | |
# --build-arg LLAMA_VERSION=llama-2-13b \ | |
# --build-arg SRC_DIR=pkg/presets/llama-2 \ | |
# --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \ | |
# -t aimodelsregistry.azurecr.io/llama-2-13b:${{ needs.setup.outputs.image_tag }} \ | |
# -r aimodelsregistry \ | |
# -f docker/presets/llama-2/Dockerfile \ | |
# . | |
# llama-2-70b: | |
# needs: setup | |
# runs-on: ubuntu-20.04 | |
# if: ${{ needs.setup.outputs.LLAMA2_MODIFIED == 'true' }} | |
# steps: | |
# - name: Checkout | |
# uses: actions/checkout@v4 | |
# with: | |
# submodules: true | |
# fetch-depth: 0 | |
# - name: Az CLI login | |
# uses: azure/login@v1 | |
# with: | |
# client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
# tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
# subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
# - name: Install Azure CLI latest | |
# run: | | |
# curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
# - name: 'Login to ACR' | |
# run: az acr login --name aimodelsregistry | |
# - name: Build and push Llama model | |
# run: | | |
# az acr build \ | |
# --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \ | |
# --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \ | |
# --build-arg LLAMA_VERSION=llama-2-70b \ | |
# --build-arg SRC_DIR=pkg/presets/llama-2 \ | |
# --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \ | |
# -t aimodelsregistry.azurecr.io/llama-2-70b:${{ needs.setup.outputs.image_tag }} \ | |
# -r aimodelsregistry \ | |
# -f docker/presets/llama-2/Dockerfile \ | |
# . | |
# llama-2-7b-chat: | |
# needs: setup | |
# runs-on: ubuntu-20.04 | |
# if: ${{ needs.setup.outputs.LLAMA2_CHAT_MODIFIED == 'true' }} | |
# steps: | |
# - name: Checkout | |
# uses: actions/checkout@v4 | |
# with: | |
# submodules: true | |
# fetch-depth: 0 | |
# - name: Az CLI login | |
# uses: azure/login@v1 | |
# with: | |
# client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
# tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
# subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
# - name: Install Azure CLI latest | |
# run: | | |
# curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
# - name: 'Login to ACR' | |
# run: az acr login --name aimodelsregistry | |
# - name: Build and push Llama chat model | |
# run: | | |
# az acr build \ | |
# --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \ | |
# --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \ | |
# --build-arg LLAMA_VERSION=llama-2-7b-chat \ | |
# --build-arg SRC_DIR=pkg/presets/llama-2-chat \ | |
# --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \ | |
# -t aimodelsregistry.azurecr.io/llama-2-7b-chat:${{ needs.setup.outputs.image_tag }} \ | |
# -r aimodelsregistry \ | |
# -f docker/presets/llama-2/Dockerfile \ | |
# . | |
# llama-2-13b-chat: | |
# needs: setup | |
# runs-on: ubuntu-20.04 | |
# if: ${{ needs.setup.outputs.LLAMA2_CHAT_MODIFIED == 'true' }} | |
# steps: | |
# - name: Checkout | |
# uses: actions/checkout@v4 | |
# with: | |
# submodules: true | |
# fetch-depth: 0 | |
# - name: Az CLI login | |
# uses: azure/login@v1 | |
# with: | |
# client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
# tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
# subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
# - name: Install Azure CLI latest | |
# run: | | |
# curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
# - name: 'Login to ACR' | |
# run: az acr login --name aimodelsregistry | |
# - name: Build and push Llama chat model | |
# run: | | |
# az acr build \ | |
# --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \ | |
# --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \ | |
# --build-arg LLAMA_VERSION=llama-2-13b-chat \ | |
# --build-arg SRC_DIR=pkg/presets/llama-2-chat \ | |
# --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \ | |
# -t aimodelsregistry.azurecr.io/llama-2-13b-chat:${{ needs.setup.outputs.image_tag }} \ | |
# -r aimodelsregistry \ | |
# -f docker/presets/llama-2/Dockerfile \ | |
# . | |
# llama-2-70b-chat: | |
# needs: setup | |
# runs-on: ubuntu-20.04 | |
# if: ${{ needs.setup.outputs.LLAMA2_CHAT_MODIFIED == 'true' }} | |
# steps: | |
# - name: Checkout | |
# uses: actions/checkout@v4 | |
# with: | |
# submodules: true | |
# fetch-depth: 0 | |
# - name: Az CLI login | |
# uses: azure/login@v1 | |
# with: | |
# client-id: ${{ secrets.AZURE_KDM_PRESET_CLIENT_ID }} | |
# tenant-id: ${{ secrets.AZURE_TENANT_ID }} | |
# subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} | |
# - name: Install Azure CLI latest | |
# run: | | |
# curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash | |
# - name: 'Login to ACR' | |
# run: az acr login --name aimodelsregistry | |
# - name: Build and push Llama chat model | |
# run: | | |
# az acr build \ | |
# --build-arg EXTERNAL_IP=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_IP }} \ | |
# --build-arg EXTERNAL_PORT=${{ secrets.AZURE_WEB_SERVER_EXTERNAL_PORT }} \ | |
# --build-arg LLAMA_VERSION=llama-2-70b-chat \ | |
# --build-arg SRC_DIR=pkg/presets/llama-2-chat \ | |
# --build-arg WEB_SERVER_AUTH_TOKEN=${{ secrets.WEB_SERVER_AUTH_TOKEN }} \ | |
# -t aimodelsregistry.azurecr.io/llama-2-70b-chat:${{ needs.setup.outputs.image_tag }} \ | |
# -r aimodelsregistry \ | |
# -f docker/presets/llama-2/Dockerfile \ | |
# . |