mirror of
https://github.com/dogkeeper886/ollama37.git
synced 2025-12-17 11:17:11 +00:00
- Add cicd/docker-compose.judge.yml for stable reference Ollama - Runs on port 11435 (separate from test subject on 11434) - Uses dogkeeper886/ollama37:latest from DockerHub - Add cicd/README.md documenting CI infrastructure 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
34 lines · 874 B · YAML
services:
  # LLM Judge - stable reference version for evaluating test results
  # Runs on port 11435 to avoid conflict with test subject (11434)
  ollama-judge:
    image: dogkeeper886/ollama37:latest
    container_name: ollama37-judge
    runtime: nvidia
    # Reserve every NVIDIA GPU on the host for this container
    # (Compose device-reservation syntax; requires the NVIDIA runtime above)
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    # Host 11435 -> container 11434; quoted so YAML cannot misparse the
    # colon-separated digits (sexagesimal trap)
    ports:
      - "11435:11434"
    # Persist pulled models and config in a named volume
    volumes:
      - ollama-judge-data:/root/.ollama
    environment:
      # Bind inside the container on the default Ollama port; the host-side
      # remap to 11435 happens in the ports section
      - OLLAMA_HOST=0.0.0.0:11434
      - NVIDIA_VISIBLE_DEVICES=all
      - NVIDIA_DRIVER_CAPABILITIES=compute,utility
    restart: unless-stopped
    # Container is healthy when the Ollama CLI can reach the server
    healthcheck:
      test: ["CMD", "/usr/local/bin/ollama", "list"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 5s
volumes:
  # Named volume so judge models survive container re-creation; the explicit
  # name avoids the project-name prefix Compose would otherwise add
  ollama-judge-data:
    name: ollama-judge-data