#!/usr/bin/env bash
set -euo pipefail
# ===================== CONFIG =====================
# Override via env if needed:
# DATA_VOLUME=/Volumes/YourDrive ./setup/setup_openclaw_ollama.sh
# PULL_OPTIONAL_CLOUD_MODEL=true ./setup/setup_openclaw_ollama.sh
# AUTO_YES=true ./setup/setup_openclaw_ollama.sh
# PULL_LOCAL_MODELS=false ./setup/setup_openclaw_ollama.sh
# Mount point of the external data drive that will hold model/agent data.
DATA_VOLUME="${DATA_VOLUME:-/Volumes/Data}"
# Real storage locations; ~/.ollama and ~/.openclaw become symlinks to these.
OLLAMA_DATA_TARGET="${OLLAMA_DATA_TARGET:-$DATA_VOLUME/ollama}"
OPENCLAW_DATA_TARGET="${OPENCLAW_DATA_TARGET:-$DATA_VOLUME/openclaw}"
# AUTO_YES=true answers every confirmation prompt with "yes".
AUTO_YES="${AUTO_YES:-false}"
PULL_LOCAL_MODELS="${PULL_LOCAL_MODELS:-ask}" # ask | true | false
# Keep this list to models that support tool calling in Ollama.
TOOL_MODELS_TO_PULL=(
"qwen3:14b"
"devstral:24b"
"gpt-oss:20b"
)
# Optional: cloud model (not local inference). Requires `ollama signin`.
OPTIONAL_CLOUD_MODEL="${OPTIONAL_CLOUD_MODEL:-minimax-m2.1:cloud}"
PULL_OPTIONAL_CLOUD_MODEL="${PULL_OPTIONAL_CLOUD_MODEL:-false}"
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# PID of a temporary `ollama serve` started by this script (reaped on EXIT).
TEMP_OLLAMA_PID=""
# Filled in Step 6 with models that were pulled AND report tools capability.
TOOL_READY_MODELS=()
# Logging helpers. Info goes to stdout; warnings/errors go to stderr so they
# survive stdout redirection. printf's %b expands the ANSI escapes stored in
# the color variables, while the message itself is passed through %s so user
# data is never interpreted as a format string (safer than `echo -e`).
log_info() { printf '%b%s%b\n' "${GREEN}" "$*" "${NC}"; }
log_warn() { printf '%b%s%b\n' "${YELLOW}" "$*" "${NC}" >&2; }
log_err() { printf '%b%s%b\n' "${RED}" "$*" "${NC}" >&2; }
# Reap the temporary `ollama serve` process (if this script started one).
# Registered on EXIT so it runs on success, failure, and Ctrl-C alike.
cleanup() {
  local pid="${TEMP_OLLAMA_PID}"
  [ -n "$pid" ] || return 0
  if kill -0 "$pid" 2>/dev/null; then
    kill "$pid" 2>/dev/null || true
  fi
}
trap cleanup EXIT
# Abort the script with an error message unless $1 is an available command.
require_cmd() {
  local name="$1"
  command -v "$name" >/dev/null 2>&1 && return 0
  log_err "ERROR: Missing required command: $name"
  exit 1
}
# Verify Homebrew is available; otherwise print the official install
# one-liner and abort (we never install Homebrew on the user's behalf).
ensure_brew() {
  command -v brew >/dev/null 2>&1 && return 0
  log_err "Homebrew not found. Install it first:"
  echo '/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"'
  exit 1
}
# Ask a yes/no question; return 0 for "yes", 1 for "no".
#   $1 - prompt text
#   $2 - default answer, "y" or "n" (default "y")
# AUTO_YES=true answers yes unconditionally; when stdin is not a terminal
# the default answer is taken silently.
prompt_yes_no() {
  local question="$1"
  local fallback="${2:-y}"
  local answer=""

  [[ "$AUTO_YES" == "true" ]] && return 0

  if [[ ! -t 0 ]]; then
    # Non-interactive shell: accept default.
    [[ "$fallback" == "y" ]]
    return $?
  fi

  local hint="[y/N]" preset="N"
  if [[ "$fallback" == "y" ]]; then
    hint="[Y/n]"
    preset="Y"
  fi
  read -r -p "$question $hint " answer
  answer="${answer:-$preset}"

  case "$answer" in
    y|Y|yes|YES) return 0 ;;
    *) return 1 ;;
  esac
}
# Install jq via Homebrew unless it is already on PATH (used by guard scripts).
install_jq_if_missing() {
  if ! command -v jq >/dev/null 2>&1; then
    log_warn "jq not found. Installing via Homebrew..."
    ensure_brew
    brew install jq
    return
  fi
  log_info "jq already installed ($(jq --version))."
}
# Install python3 via Homebrew unless it is already on PATH.
install_python3_if_missing() {
  if ! command -v python3 >/dev/null 2>&1; then
    log_warn "python3 not found. Installing via Homebrew..."
    ensure_brew
    brew install python
    return
  fi
  log_info "python3 already installed ($(python3 --version 2>/dev/null || echo python3))."
}
# Print only the major component of `node -v` (e.g. "v22.3.1" -> "22").
# Propagates node's failure status (mirrors the original pipeline under
# `set -o pipefail`) and prints nothing when node is unavailable.
node_major_version() {
  local tag
  tag="$(node -v 2>/dev/null)" || return
  tag="${tag#v}"
  printf '%s\n' "${tag%%.*}"
}
# Ensure $1 (e.g. ~/.ollama) is a symlink pointing at $2 (a directory on the
# data volume), migrating any pre-existing real directory's contents first.
#   $1 - source path to replace with a symlink
#   $2 - target directory (created if absent)
#   $3 - human-readable label for log messages
# Aborts if the source exists but is neither a directory nor a symlink.
link_data_dir() {
  local src="$1"
  local dst="$2"
  local tag="$3"

  mkdir -p "$dst"

  # Already a symlink: repoint it only if it leads somewhere else.
  if [ -L "$src" ]; then
    local existing
    existing="$(readlink "$src")"
    if [ "$existing" = "$dst" ]; then
      log_info "${tag}: symlink already correct (${src} -> ${dst})"
    else
      rm "$src"
      ln -s "$dst" "$src"
      log_info "${tag}: updated symlink ${src} -> ${dst}"
    fi
    return 0
  fi

  # A real directory: copy its contents into the target, then remove it so
  # the symlink can take its place. Prefer rsync; fall back to cp -a.
  if [ -d "$src" ]; then
    log_warn "${tag}: migrating existing ${src} data to ${dst}..."
    if command -v rsync >/dev/null 2>&1; then
      rsync -a "$src"/ "$dst"/
    else
      cp -a "$src"/. "$dst"/
    fi
    rm -rf "$src"
  elif [ -e "$src" ]; then
    # Some other filesystem object (file, socket, ...): refuse to touch it.
    log_err "ERROR: ${src} exists but is not a directory/symlink. Resolve manually."
    exit 1
  fi

  ln -s "$dst" "$src"
  log_info "${tag}: symlink created ${src} -> ${dst}"
}
# Make sure the Ollama HTTP API answers on 127.0.0.1:11434. If not, launch a
# temporary `ollama serve` in the background (recorded in TEMP_OLLAMA_PID so
# the EXIT trap can reap it) and poll up to 30 times, one second apart.
ensure_ollama_running() {
  local api_url="http://127.0.0.1:11434/api/tags"

  if curl -fsS "$api_url" >/dev/null 2>&1; then
    log_info "Ollama API already running on 127.0.0.1:11434."
    return 0
  fi

  log_warn "Ollama API not running. Starting a temporary local ollama server..."
  ollama serve >/tmp/openclaw-ollama-serve.log 2>&1 &
  TEMP_OLLAMA_PID="$!"

  local attempt
  for ((attempt = 1; attempt <= 30; attempt++)); do
    if curl -fsS "$api_url" >/dev/null 2>&1; then
      log_info "Temporary Ollama API started."
      return 0
    fi
    # No point sleeping after the final failed attempt.
    if (( attempt < 30 )); then
      sleep 1
    fi
  done

  log_err "ERROR: Ollama API did not become ready in time."
  log_err "See /tmp/openclaw-ollama-serve.log for details."
  exit 1
}
# Ask the local Ollama API for a model's metadata and succeed only when its
# "capabilities" array advertises "tools" (tool-calling) support.
model_supports_tools() {
  local model_name="$1"
  local body reply
  # Model tags contain only [A-Za-z0-9:._-], so plain interpolation into the
  # JSON body is safe here.
  body="$(printf '{"name":"%s"}' "$model_name")"
  reply="$(curl -fsS "http://127.0.0.1:11434/api/show" -H "Content-Type: application/json" -d "$body" || true)"
  printf '%s' "$reply" | tr -d '\n' | grep -Eq '"capabilities"[[:space:]]*:[[:space:]]*\[[^]]*"tools"'
}
echo -e "${GREEN}=== OpenClaw + Ollama Setup Script (Data Drive Storage) ===${NC}"
echo "Current time: $(date)"
echo "Target drive: $DATA_VOLUME"
echo ""
# Step 0: Pre-flight checks
# curl is needed both for the Ollama installer and for talking to its API.
require_cmd curl
# The external data volume must be mounted before any symlinks point at it.
if [ ! -d "$DATA_VOLUME" ]; then
log_err "ERROR: Data drive not found at $DATA_VOLUME"
echo "Run 'ls /Volumes' and update DATA_VOLUME= in this script/environment."
exit 1
fi
# Step 1: Create target dirs and link data dirs
# Relocate ~/.ollama and ~/.openclaw onto the data volume so large model
# files do not consume space on the system drive.
mkdir -p "$OLLAMA_DATA_TARGET" "$OPENCLAW_DATA_TARGET"
link_data_dir "$HOME/.ollama" "$OLLAMA_DATA_TARGET" "Ollama"
link_data_dir "$HOME/.openclaw" "$OPENCLAW_DATA_TARGET" "OpenClaw"
# Step 2: Install / verify Ollama
if ! command -v ollama >/dev/null 2>&1; then
  if prompt_yes_no "Ollama not found. Install Ollama now?" "y"; then
    if [[ "$(uname -s)" == "Darwin" ]]; then
      # ollama.com/install.sh supports Linux only; this script clearly targets
      # macOS (/Volumes, Homebrew), so install through brew there instead.
      log_warn "Installing Ollama via Homebrew..."
      ensure_brew
      brew install ollama
    else
      log_warn "Installing Ollama via official script..."
      curl -fsSL https://ollama.com/install.sh | sh
    fi
  else
    log_err "Ollama is required for local models. Aborting."
    exit 1
  fi
else
  log_info "Ollama already installed ($(ollama --version))."
fi
# Step 3: Install / verify Node.js >= 22 for OpenClaw
need_node_install=false
if ! command -v node >/dev/null 2>&1; then
  need_node_install=true
else
  node_major="$(node_major_version || true)"
  # A non-numeric (unparsable) version is treated the same as a too-old one.
  if ! echo "$node_major" | grep -Eq '^[0-9]+$' || [ "$node_major" -lt 22 ]; then
    need_node_install=true
  fi
fi
if [ "$need_node_install" = true ]; then
  log_warn "Node.js >= 22 not found. Installing via Homebrew..."
  ensure_brew
  brew install node
  # Fail loudly if the install did not actually put node on PATH; otherwise
  # the "ready" message below would report an empty version.
  require_cmd node
fi
log_info "Node.js ready ($(node -v))."
# Step 4: Install / verify jq + python3 (used by guard scripts)
install_jq_if_missing
install_python3_if_missing
# Step 5: Install / verify OpenClaw
if ! command -v openclaw >/dev/null 2>&1; then
log_warn "OpenClaw not found. Installing via npm (non-interactive)..."
# npm ships with Node.js (ensured in Step 3); bail out if it is still absent.
if ! command -v npm >/dev/null 2>&1; then
log_err "ERROR: npm is required but was not found."
exit 1
fi
npm install -g openclaw
else
log_info "OpenClaw already installed."
fi
# Step 6: Pull tool-capable local models (optional prompt)
# PULL_LOCAL_MODELS=true/false forces the decision; "ask" prompts the user.
# Any other value warns, then falls back to asking (the `ask` and `*` arms
# previously duplicated the whole prompt branch; they are merged here).
should_pull_models="true"
case "$PULL_LOCAL_MODELS" in
  true) should_pull_models="true" ;;
  false) should_pull_models="false" ;;
  *)
    if [[ "$PULL_LOCAL_MODELS" != "ask" ]]; then
      log_warn "Unknown PULL_LOCAL_MODELS value '$PULL_LOCAL_MODELS'; defaulting to ask."
    fi
    if prompt_yes_no "Pull local Ollama models now? (large download)" "y"; then
      should_pull_models="true"
    else
      should_pull_models="false"
    fi
    ;;
esac
if [[ "$should_pull_models" == "true" ]]; then
  ensure_ollama_running
  log_warn "Pulling tool-capable local models (may take a while)..."
  for model in "${TOOL_MODELS_TO_PULL[@]}"; do
    # A failed pull is non-fatal: remaining models are still attempted.
    if ollama pull "$model"; then
      # Record only models whose metadata confirms tool-calling support.
      if model_supports_tools "$model"; then
        TOOL_READY_MODELS+=("$model")
        log_info "Model ready with tools: $model"
      else
        log_warn "Model pulled but does not report tools capability: $model"
      fi
    else
      log_warn "Failed to pull model: $model"
    fi
  done
else
  log_warn "Skipping local model pulls (PULL_LOCAL_MODELS=$PULL_LOCAL_MODELS)."
fi
# Step 7: Optional cloud model
# Cloud-tagged models run on Ollama's hosted service, not locally, and
# require an authenticated `ollama signin` session.
if [ "$PULL_OPTIONAL_CLOUD_MODEL" = true ]; then
ensure_ollama_running
log_warn "Pulling optional cloud model ($OPTIONAL_CLOUD_MODEL)..."
log_warn "If prompted, run: ollama signin"
ollama pull "$OPTIONAL_CLOUD_MODEL" || log_warn "Failed to pull cloud model: $OPTIONAL_CLOUD_MODEL"
fi
echo ""
log_info "Setup complete."
# Summary: list models that were pulled AND verified to advertise the
# "tools" capability in Step 6 (empty if model pulls were skipped).
echo "Detected tool-capable local models:"
if [ "${#TOOL_READY_MODELS[@]}" -eq 0 ]; then
echo " (none detected)"
else
for model in "${TOOL_READY_MODELS[@]}"; do
echo " - $model"
done
fi
echo ""
# Manual follow-up instructions; nothing below changes system state.
echo "Next steps (Telegram + OpenClaw):"
echo "1. Export Ollama auth marker (required by OpenClaw provider discovery):"
echo ' export OLLAMA_API_KEY="ollama-local"'
echo "2. Run OpenClaw onboarding and configure Telegram (@topdoglabs_bot token):"
echo " openclaw wizard # or: openclaw onboard"
echo "3. Choose a local model that supports tools (avoid deepseek-coder-v2:16b):"
# Suggest the first verified model when one exists, else a known-good default.
if [ "${#TOOL_READY_MODELS[@]}" -gt 0 ]; then
echo " openclaw models set ollama/${TOOL_READY_MODELS[0]}"
else
echo " openclaw models set ollama/qwen3:14b"
fi
echo "4. Start gateway:"
echo " openclaw gateway"
echo " # optional daemon install: openclaw onboard --install-daemon"
echo "5. Approve Telegram pairing on first DM:"
echo " openclaw pairing list telegram"
echo " openclaw pairing approve telegram <CODE>"
echo ""
echo "Verification:"
echo " ls -l ~/.ollama"
echo " ls -l ~/.openclaw"
echo " ollama list"
echo " openclaw models list"
echo " openclaw logs --follow"
echo ""
log_warn "Important: Keep $DATA_VOLUME mounted whenever using Ollama/OpenClaw."
echo "MiniMax note: Ollama's minimax models are cloud-tagged (not local inference)."