CI with small LLM: use Qwen-2.5 0.5B #41

Workflow file for this run

name: English tests

on:
  workflow_dispatch:
  push:
    branches:
      - main
  pull_request:

jobs:
  canary-multi-turn:
    runs-on: ubuntu-22.04
    timeout-minutes: 7
    steps:
      - uses: actions/checkout@v4
      - name: Run SearXNG
        uses: ./.github/actions/searxng
        timeout-minutes: 3
      - run: ./gamal.js tests/en/canary-multi-turn.txt
        env:
          SEARXNG_URL: http://localhost:9000
          LLM_API_BASE_URL: ${{ secrets.LLM_API_BASE_URL }}
          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
          LLM_CHAT_MODEL: ${{ secrets.LLM_CHAT_MODEL }}
          LLM_JSON_SCHEMA: 1

  high-school-stem:
    runs-on: ubuntu-22.04
    needs: canary-multi-turn
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - name: Run SearXNG
        uses: ./.github/actions/searxng
        timeout-minutes: 3
      - run: ./gamal.js tests/en/high-school-stem.txt
        env:
          SEARXNG_URL: http://localhost:9000
          LLM_API_BASE_URL: ${{ secrets.LLM_API_BASE_URL }}
          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
          LLM_CHAT_MODEL: ${{ secrets.LLM_CHAT_MODEL }}
          LLM_JSON_SCHEMA: 1
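
Both jobs read the LLM endpoint from the LLM_API_BASE_URL, LLM_API_KEY, and LLM_CHAT_MODEL repository secrets, so "use Qwen-2.5 0.5B" here only means pointing those secrets at any OpenAI-compatible endpoint serving that model. As a minimal sketch (not part of the workflow above), the small model could instead be served directly on the runner; this assumes Ollama, its qwen2.5:0.5b model tag, and its default OpenAI-compatible endpoint on port 11434:

      # Hypothetical alternative step: serve Qwen-2.5 0.5B on the runner itself.
      - name: Serve Qwen-2.5 0.5B locally (sketch)
        run: |
          # Install Ollama (starts its service on :11434) and pull the small model.
          curl -fsSL https://ollama.com/install.sh | sh
          ollama pull qwen2.5:0.5b
      - run: ./gamal.js tests/en/canary-multi-turn.txt
        env:
          SEARXNG_URL: http://localhost:9000
          # Point Gamal at the local endpoint instead of the repository secrets.
          LLM_API_BASE_URL: http://localhost:11434/v1
          LLM_CHAT_MODEL: qwen2.5:0.5b
          LLM_JSON_SCHEMA: 1

With a model this small, the trade-off is that the two test suites (canary-multi-turn and high-school-stem) become the gate for whether it can still follow the tool-use and JSON-schema prompts.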