Files
homeai/homeai-llm/scripts/import-local-models.sh
Aodhan Collins c4f3dbed77 Phase 4: OpenClaw agent setup + expanded model catalog
- Add OpenClaw launchd plist (gateway on port 8080)
- Update Llama-3.3-70B Modelfile: fix FROM path, add tool-calling TEMPLATE,
  set num_ctx 32768 (fits 70B in 64GB with safe headroom)
- Update Codestral-22B and Qwen3-32B Modelfiles
- Add Modelfiles for all models in ~/Models/LLM and ~/Models/MLX:
  EVA-LLaMA-3.33-70B, Midnight-Miqu-70B, QwQ-32B, Qwen3.5-35B,
  Qwen3-Coder-30B, Qwen3-Coder-Next, Qwen3-VL-30B, GLM-4.6V-Flash,
  DeepSeek-R1-8B, gemma-3-27b, and MLX variants
- Add import-local-models.sh helper script

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-06 00:03:06 +00:00

65 lines
2.0 KiB
Bash
Executable File
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

#!/usr/bin/env bash
# scripts/import-local-models.sh — Create local Ollama models from Modelfiles
#
# This script scans the modelfiles/ directory and uses `ollama create`
# to import each model configuration into Ollama. The parameters (like
# temperature, context window) and system prompts defined in these files
# will automatically be inherited by Open WebUI.
# Strict mode: abort on errors (-e), unset variables (-u), and pipeline
# failures (pipefail). All later commands rely on this for error handling.
set -euo pipefail
# Absolute path of the directory containing this script (scripts/).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Repository root: one level above scripts/.
REPO_DIR="$(cd "${SCRIPT_DIR}/.." && pwd)"
# Directory holding one Ollama Modelfile per model to import.
MODELFILES_DIR="${REPO_DIR}/modelfiles"
# Minimal logging helpers (fallbacks when no shared logging library is
# sourced). All print an ANSI-colored, glyph-prefixed line to stdout.

# Bold blue section banner preceded by a blank line.
log_section() { printf '\n\033[1;34m=== %s ===\033[0m\n' "$1"; }

# Cyan arrow marking a step in progress.
log_step() { printf '\033[1;36m==>\033[0m %s\n' "$1"; }

# Plain informational line.
# NOTE(review): the original printed an empty bold/reset pair
# ("\033[1m\033[0m"), a no-op — almost certainly a glyph lost in transit
# (the file carries a confusable-Unicode warning). Restored with an info
# symbol to match the ✔ / ⚠ / ✖ style of the sibling helpers.
log_info() { printf '\033[1mℹ\033[0m %s\n' "$1"; }
# Green check mark for a completed action.
log_success() { printf '\033[1;32m✔\033[0m %s\n' "$1"; }

# Yellow warning sign for a non-fatal problem.
log_warn() { printf '\033[1;33m⚠\033[0m %s\n' "$1"; }

# Red cross for an error message.
log_error() { printf '\033[1;31m✖\033[0m %s\n' "$1"; }
# Print an error line and abort the whole script with status 1.
die() {
  log_error "$1"
  exit 1
}

# True when the named command resolves on PATH (silent probe).
command_exists() {
  command -v "$1" >/dev/null 2>&1
}
# Preflight: require both the ollama CLI and a reachable local server
# (default API port 11434) before attempting any import.
command_exists ollama || die "Ollama not found. Run: bash setup.sh first."
curl -sf http://localhost:11434 -o /dev/null || die "Ollama is not running. Start it first."
log_section "Importing Local Models"

# Guard against a missing modelfiles directory: without this, the unmatched
# glob below would expand to the literal pattern, the loop would import
# nothing, and the script would still report success with "0 total".
[[ -d "$MODELFILES_DIR" ]] || die "Modelfiles directory not found: $MODELFILES_DIR"

total=0; created=0; failed=0

# Every regular file in modelfiles/ (macOS .DS_Store excluded) is treated as
# an Ollama Modelfile; its basename becomes the local model name.
for modelfile_path in "$MODELFILES_DIR"/*; do
  [[ -f "$modelfile_path" ]] || continue
  model_name="$(basename "$modelfile_path")"
  [[ "$model_name" != ".DS_Store" ]] || continue

  total=$((total + 1))
  log_step "Creating model: $model_name"
  # `ollama create` exits non-zero on a bad Modelfile; count the failure but
  # keep going so one broken file doesn't abort the whole import run.
  if ollama create "$model_name" -f "$modelfile_path"; then
    log_success "Created $model_name"
    created=$((created + 1))
  else
    log_error "Failed to create $model_name"
    failed=$((failed + 1))
  fi
done
echo ""
log_info "Import complete: ${created} created, ${failed} failed (of ${total} total)"

# Propagate a non-zero exit status when any model failed to build, so
# callers (CI, setup scripts) can detect a partial import.
if (( failed > 0 )); then
  log_warn "Some models failed to create. Check the output above."
  exit 1
fi

echo ""
log_info "Available models:"
ollama list