REST API (routes/api.py): Three endpoints behind API key auth for programmatic image generation via presets — list presets, queue generation with optional overrides, and poll job status. Shared generation logic extracted from routes/presets.py into services/generation.py so both web UI and API use the same code path. Fallback covers: library index pages now show a random generated image at reduced opacity when no cover is assigned, instead of "No Image". Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
257 lines
10 KiB
Python
257 lines
10 KiB
Python
import json
|
|
import logging
|
|
import os
|
|
import random
|
|
import secrets
|
|
import subprocess
|
|
|
|
import requests
|
|
from flask import flash, jsonify, redirect, render_template, request, session, url_for
|
|
|
|
from models import Checkpoint, Settings, db
|
|
|
|
logger = logging.getLogger('gaze')
|
|
|
|
|
|
def register_routes(app):
|
|
|
|
@app.template_global()
def random_gen_image(category, slug):
    """Pick one generated image at random for a resource; None if none exist."""
    image_exts = ('.png', '.jpg', '.jpeg', '.webp')
    directory = os.path.join(app.config['UPLOAD_FOLDER'], category, slug)
    try:
        entries = os.listdir(directory)
    except FileNotFoundError:
        # Resource has no generated-images folder yet.
        return None
    candidates = [name for name in entries if name.lower().endswith(image_exts)]
    if not candidates:
        return None
    return f"{category}/{slug}/{random.choice(candidates)}"
|
|
|
|
@app.context_processor
def inject_comfyui_ws_url():
    """Expose COMFYUI_WS_URL to all templates, rewriting loopback hosts."""
    base = app.config.get('COMFYUI_URL', 'http://127.0.0.1:8188')
    # A loopback address only works for browsers running on the server itself,
    # so point remote clients' WebSocket at the host they used to reach us.
    if 'localhost' in base or '127.0.0.1' in base:
        request_host = request.host.split(':')[0]
        base = base.replace('127.0.0.1', request_host).replace('localhost', request_host)

    # Swap the http(s) scheme for ws(s) to form the WebSocket endpoint.
    socket_base = base.replace('http://', 'ws://').replace('https://', 'wss://')
    return dict(COMFYUI_WS_URL=f"{socket_base}/ws")
|
|
|
|
@app.context_processor
def inject_default_checkpoint():
    """Provide the checkpoint list and the session's default to every template."""
    return dict(
        all_checkpoints=Checkpoint.query.order_by(Checkpoint.name).all(),
        default_checkpoint_path=session.get('default_checkpoint', ''),
    )
|
|
|
|
@app.route('/set_default_checkpoint', methods=['POST'])
def set_default_checkpoint():
    """Store the user's default checkpoint in the session and persist it.

    Persistence is best-effort in two places: the Settings row (so the choice
    survives server restarts) and comfy_workflow.json (legacy consumers).
    Failures in either are logged but never surfaced to the client.

    Returns:
        {'status': 'ok'} unconditionally.
    """
    checkpoint_path = request.form.get('checkpoint_path', '')
    session['default_checkpoint'] = checkpoint_path
    session.modified = True

    # Persist to database Settings so it survives across server restarts
    try:
        settings = Settings.query.first()
        if not settings:
            settings = Settings()
            db.session.add(settings)
        settings.default_checkpoint = checkpoint_path
        db.session.commit()
        logger.info("Default checkpoint saved to database: %s", checkpoint_path)
    except Exception:
        # logger.exception records the traceback; the f-string form didn't.
        logger.exception("Failed to persist checkpoint to database")
        db.session.rollback()

    # Also persist to comfy_workflow.json for backwards compatibility
    try:
        workflow_path = 'comfy_workflow.json'
        # Explicit encoding: JSON workflow files should be read/written as UTF-8
        # regardless of the platform's default locale.
        with open(workflow_path, 'r', encoding='utf-8') as f:
            workflow = json.load(f)

        # Update node 4 (CheckpointLoaderSimple) with the new checkpoint
        if '4' in workflow and 'inputs' in workflow['4']:
            workflow['4']['inputs']['ckpt_name'] = checkpoint_path

        with open(workflow_path, 'w', encoding='utf-8') as f:
            json.dump(workflow, f, indent=2)
    except Exception:
        logger.exception("Failed to persist checkpoint to workflow file")

    return {'status': 'ok'}
|
|
|
|
|
|
@app.route('/api/status/comfyui')
def api_status_comfyui():
    """Return whether ComfyUI is reachable."""
    base_url = app.config.get('COMFYUI_URL', 'http://127.0.0.1:8188')
    try:
        # /system_stats is a cheap endpoint that any live ComfyUI answers.
        reachable = requests.get(f'{base_url}/system_stats', timeout=3).ok
    except Exception:
        reachable = False
    return {'status': 'ok'} if reachable else {'status': 'error'}
|
|
|
|
|
|
@app.route('/api/comfyui/loaded_checkpoint')
def api_comfyui_loaded_checkpoint():
    """Return the checkpoint name from the most recently completed ComfyUI job.

    Returns:
        {'checkpoint': <name or None>} — None when ComfyUI is unreachable,
        history is empty, or the workflow has no node "4" checkpoint loader.
    """
    url = app.config.get('COMFYUI_URL', 'http://127.0.0.1:8188')
    try:
        resp = requests.get(f'{url}/history', timeout=3)
        if not resp.ok:
            return {'checkpoint': None}
        history = resp.json()
        if not history:
            return {'checkpoint': None}
        # Each history entry's 'prompt' is a list whose first element is the
        # queue counter (the same list is indexed at [2] below for the node
        # map). The counter increases per job, so the max is the most recent.
        # The previous key compared status_str strings, which does not order
        # jobs by recency at all.
        latest = max(history.values(),
                     key=lambda job: job.get('prompt', [0])[0] or 0)
        # Node "4" is the checkpoint loader in the workflow
        nodes = latest.get('prompt', [None, None, {}])[2]
        ckpt_name = nodes.get('4', {}).get('inputs', {}).get('ckpt_name')
        return {'checkpoint': ckpt_name}
    except Exception:
        return {'checkpoint': None}
|
|
|
|
|
|
@app.route('/api/status/mcp')
def api_status_mcp():
    """Return whether the danbooru-mcp Docker container is running."""
    cmd = ['docker', 'ps', '--filter', 'name=danbooru-mcp', '--format', '{{.Names}}']
    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=5)
        if 'danbooru-mcp' in proc.stdout:
            return {'status': 'ok'}
    except Exception:
        # docker missing, daemon down, or the 5s timeout hit: report error.
        pass
    return {'status': 'error'}
|
|
|
|
|
|
@app.route('/api/status/llm')
def api_status_llm():
    """Return whether the configured LLM provider is reachable."""
    try:
        settings = Settings.query.first()
        if not settings:
            return {'status': 'error', 'message': 'Settings not configured'}

        if settings.llm_provider == 'openrouter':
            # Hosted provider: listing models is a lightweight authenticated probe.
            if not settings.openrouter_api_key:
                return {'status': 'error', 'message': 'API key not configured'}
            auth_header = {
                "Authorization": f"Bearer {settings.openrouter_api_key}",
            }
            probe = requests.get("https://openrouter.ai/api/v1/models",
                                 headers=auth_header, timeout=5)
            if probe.ok:
                return {'status': 'ok', 'provider': 'OpenRouter'}
        else:
            # Local provider (Ollama or LMStudio): hit its models endpoint.
            if not settings.local_base_url:
                return {'status': 'error', 'message': 'Base URL not configured'}
            probe = requests.get(f"{settings.local_base_url.rstrip('/')}/models",
                                 timeout=5)
            if probe.ok:
                return {'status': 'ok', 'provider': settings.llm_provider.title()}
    except Exception as e:
        return {'status': 'error', 'message': str(e)}

    # Probe answered but not OK.
    return {'status': 'error'}
|
|
|
|
|
|
@app.route('/api/status/character-mcp')
def api_status_character_mcp():
    """Return whether the character-mcp Docker container is running."""
    try:
        proc = subprocess.run(
            ['docker', 'ps', '--format', '{{.Names}}'],
            capture_output=True, text=True, timeout=5,
        )
    except Exception:
        # docker unavailable or the 5s timeout hit.
        return {'status': 'error'}
    # Any running container whose name mentions character-mcp counts.
    running = any('character-mcp' in name for name in proc.stdout.splitlines())
    return {'status': 'ok'} if running else {'status': 'error'}
|
|
|
|
|
|
@app.route('/get_openrouter_models', methods=['POST'])
def get_openrouter_models():
    """Fetch the OpenRouter model catalogue for the submitted API key.

    Returns:
        {'models': [{'id', 'name'}, ...]} on success; ({'error': ...}, 400)
        when no key was posted; ({'error': ...}, 500) on any upstream failure.
    """
    api_key = request.form.get('api_key')
    if not api_key:
        return {'error': 'API key is required'}, 400

    headers = {"Authorization": f"Bearer {api_key}"}
    try:
        # timeout keeps a hung upstream from blocking this worker indefinitely
        response = requests.get("https://openrouter.ai/api/v1/models",
                                headers=headers, timeout=10)
        response.raise_for_status()
        models = response.json().get('data', [])
        # Return simplified list of models
        return {'models': [{'id': m['id'], 'name': m.get('name', m['id'])} for m in models]}
    except Exception as e:
        return {'error': str(e)}, 500
|
|
|
|
@app.route('/get_local_models', methods=['POST'])
def get_local_models():
    """Fetch available models from a local OpenAI-compatible server.

    Returns:
        {'models': [{'id', 'name'}, ...]} on success; ({'error': ...}, 400)
        when no base URL was posted; ({'error': ...}, 500) on request failure.
    """
    base_url = request.form.get('base_url')
    if not base_url:
        return {'error': 'Base URL is required'}, 400

    try:
        # timeout keeps a hung local server from blocking this worker indefinitely
        response = requests.get(f"{base_url.rstrip('/')}/models", timeout=10)
        response.raise_for_status()
        models = response.json().get('data', [])
        # Ollama/LMStudio often follow the same structure as OpenAI
        return {'models': [{'id': m['id'], 'name': m.get('name', m['id'])} for m in models]}
    except Exception as e:
        return {'error': str(e)}, 500
|
|
|
|
@app.route('/settings', methods=['GET', 'POST'])
def settings():
    """Render the settings page; on POST, persist the submitted configuration."""
    record = Settings.query.first()
    if not record:
        record = Settings()
        db.session.add(record)
        db.session.commit()

    if request.method == 'POST':
        record.llm_provider = request.form.get('llm_provider', 'openrouter')
        record.openrouter_api_key = request.form.get('api_key')
        record.openrouter_model = request.form.get('model')
        record.local_base_url = request.form.get('local_base_url')
        record.local_model = request.form.get('local_model')
        # Directory fields keep their previous value when submitted blank.
        for field in ('lora_dir_characters', 'lora_dir_outfits', 'lora_dir_actions',
                      'lora_dir_styles', 'lora_dir_scenes', 'lora_dir_detailers',
                      'checkpoint_dirs'):
            setattr(record, field, request.form.get(field) or getattr(record, field))
        db.session.commit()
        flash('Settings updated successfully!')
        return redirect(url_for('settings'))

    return render_template('settings.html', settings=record)
|
|
|
|
@app.route('/api/key/regenerate', methods=['POST'])
def regenerate_api_key():
    """Mint a fresh 64-hex-char API key, store it in Settings, and return it."""
    record = Settings.query.first()
    if not record:
        record = Settings()
        db.session.add(record)
    # secrets (not random) for a security-sensitive token.
    record.api_key = secrets.token_hex(32)
    db.session.commit()
    logger.info("API key regenerated")
    return jsonify({'api_key': record.api_key})
|