#!/usr/bin/env bash
# setup-ollama.sh — Install Ollama and pull the inference model (Phase 2)
# Non-interactive; suitable for server environments.
#
# Usage: setup-ollama.sh [MODEL]
#   MODEL  Ollama model tag to pull (default: phi4)
set -euo pipefail

readonly MODEL="${1:-phi4}"
readonly OLLAMA_API="http://localhost:11434"

# Print an error to stderr and exit non-zero.
die() { printf 'ERROR: %s\n' "$*" >&2; exit 1; }

#######################################
# Poll the local Ollama API until the server answers or the retry
# budget is exhausted.
# Arguments: $1 - max attempts, one per second (default: 30)
# Returns:   0 once the API responds, 1 on timeout
#######################################
wait_for_ollama() {
  local retries="${1:-30}"
  local i
  for (( i = 0; i < retries; i++ )); do
    if curl -sf "${OLLAMA_API}/api/tags" > /dev/null; then
      return 0
    fi
    sleep 1
  done
  return 1
}

main() {
  # The install step below pipes a remote script into sh; fail early
  # with a clear message if curl itself is missing.
  command -v curl > /dev/null || die "curl is required but not installed"

  echo "Installing Ollama..."
  curl -fsSL https://ollama.com/install.sh | sh

  # The installer starts the service asynchronously; the daemon must be
  # reachable before 'ollama pull' can talk to it, so verify first
  # (the original pulled immediately and could race the service start).
  echo "Verifying Ollama is running..."
  wait_for_ollama 30 \
    || die "Ollama did not start. Check 'systemctl status ollama'."

  echo "Pulling model: $MODEL ..."
  ollama pull "$MODEL"

  echo "Ollama is up. Model $MODEL ready."
}

main "$@"
# Review trail (commented out: raw table text is not valid shell and would
# abort the script with a syntax error if left unprefixed):
# | # | Change | User | Description | Committed | |
# |---|---|---|---|---|---|
# | #1 | 32636 | bot_Claude_Anthropic |
# Scaffold p4-rca-agent repo: directory structure, data models, layer stubs, test fixtures, config, docs. Covers briefing tasks 2 and 3. #review-32637 @robert_cowham @tom_tyler |