mirror of
https://github.com/dogkeeper886/ollama37.git
synced 2025-12-18 03:37:09 +00:00
Add LLM Judge container infrastructure
- Add cicd/docker-compose.judge.yml for a stable reference Ollama
- Runs on port 11435 (separate from test subject on 11434)
- Uses dogkeeper886/ollama37:latest from DockerHub
- Add cicd/README.md documenting CI infrastructure

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
cicd/docker-compose.judge.yml (new file, 33 lines)
@@ -0,0 +1,33 @@
---
# LLM Judge container — a stable reference Ollama used to evaluate test
# results. Published on host port 11435 so it never conflicts with the
# test-subject instance on 11434.
services:
  ollama-judge:
    image: dogkeeper886/ollama37:latest
    container_name: ollama37-judge
    # GPU access: `runtime: nvidia` covers legacy Compose/Docker setups;
    # the `deploy.resources` reservation is the Compose-spec equivalent.
    # Keeping both preserves the original file's behavior on either stack.
    runtime: nvidia
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    ports:
      # host 11435 -> container 11434 (quoted to avoid YAML sexagesimal trap)
      - "11435:11434"
    volumes:
      # persist pulled models/config across container restarts
      - ollama-judge-data:/root/.ollama
    environment:
      # bind inside the container on the default port; host mapping remaps it
      - OLLAMA_HOST=0.0.0.0:11434
      - NVIDIA_VISIBLE_DEVICES=all
      - NVIDIA_DRIVER_CAPABILITIES=compute,utility
    restart: unless-stopped
    healthcheck:
      # `ollama list` succeeds only once the API server is answering
      test: ["CMD", "/usr/local/bin/ollama", "list"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 5s

volumes:
  ollama-judge-data:
    # fixed name so other tooling (CI scripts) can reference the volume
    name: ollama-judge-data
Reference in New Issue
Block a user