Skip to content

CI: Use pre-built Llamafile #4

CI: Use pre-built Llamafile

CI: Use pre-built Llamafile #4

Workflow file for this run

# CI workflow: run the ask-llm.clj smoke test against a locally served LLM.
# Reconstructed indentation — the pasted copy had lost all nesting and was
# not valid YAML / not a valid Actions workflow.
name: Test with Babashka
on: [push, pull_request]

jobs:
  test:
    runs-on: ubuntu-22.04
    # Hard cap for the whole job; individual slow steps carry their own caps below.
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4

      # Install Babashka via its official installer script, then sanity-check it.
      - name: Install Babashka
        run: curl -OL https://raw.githubusercontent.com/babashka/babashka/master/install && bash ./install
      - run: bb --version

      # Composite action that downloads/starts the pre-built Llamafile LLM server
      # (Phi 2). Its internals live in ./.github/actions/prepare-llm.
      - name: Prepare LLM (Phi 2)
        uses: ./.github/actions/prepare-llm

      # Poll the server's health endpoint until it reports 'ok'; the step-level
      # timeout bounds the busy-wait loop if the server never comes up.
      - name: Wait until the LLM server is ready
        run: while ! curl -s 'http://localhost:8080/health' | grep 'ok'; do sleep 1; done
        timeout-minutes: 3

      # End-to-end check: pipe a question through ask-llm.clj and grep the answer.
      - run: echo 'Which planet in our solar system is the largest?' | ./ask-llm.clj | grep -i jupiter
        timeout-minutes: 7
        env:
          LLM_API_BASE_URL: 'http://127.0.0.1:8080/v1'
          LLM_DEBUG: 1