Files
character-browser/services/comfyui.py
Aodhan Collins 29a6723b25 Code review fixes: wardrobe migration, response validation, path traversal guard, deduplication
- Migrate 11 character JSONs from old wardrobe keys to _BODY_GROUP_KEYS format
- Add is_favourite/is_nsfw columns to Preset model
- Add HTTP response validation and timeouts to ComfyUI client
- Add path traversal protection on replace cover route
- Deduplicate services/mcp.py (4 functions → 2 generic + 2 wrappers)
- Extract apply_library_filters() and clean_html_text() shared helpers
- Add named constants for 17 ComfyUI workflow node IDs
- Fix bare except clauses in services/llm.py
- Fix tags schema in ensure_default_outfit() (list → dict)
- Convert f-string logging to lazy % formatting
- Add 5-minute polling timeout to frontend waitForJob()
- Improve migration error handling (non-duplicate errors log at WARNING)
- Update CLAUDE.md to reflect all changes

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-22 00:31:27 +00:00

141 lines
5.7 KiB
Python

"""Thin HTTP client for a ComfyUI server: queue prompts, poll history, fetch images."""
import json
import logging
import requests
from flask import current_app
from services.workflow import NODE_CHECKPOINT
# Project-wide logger name ('gaze'), shared with the rest of the application.
logger = logging.getLogger('gaze')
def get_loaded_checkpoint():
    """Return the checkpoint path currently loaded in ComfyUI, or None.

    Reads ComfyUI's ``/history``, takes the most recent job, and pulls the
    ``ckpt_name`` input from the checkpoint-loader node (``NODE_CHECKPOINT``).

    Returns:
        The ``ckpt_name`` string, or None if it cannot be determined
        (server unreachable, empty history, unexpected payload shape).
    """
    try:
        url = current_app.config.get('COMFYUI_URL', 'http://127.0.0.1:8188')
        resp = requests.get(f'{url}/history', timeout=3)
        if resp.ok:
            history = resp.json()
            if history:
                # Pick the most recent job.  NOTE(review): this assumes the
                # history keys sort chronologically as plain strings; if the
                # IDs are variable-length numeric strings (or UUIDs) a string
                # max() may not select the latest job — confirm key format.
                latest = history[max(history.keys())]
                # /history entries store the prompt as a list whose third
                # element is the node-id -> node-spec mapping.
                nodes = latest.get('prompt', [None, None, {}])[2]
                return nodes.get(NODE_CHECKPOINT, {}).get('inputs', {}).get('ckpt_name')
    except Exception as e:
        # Best-effort probe: never raise, but don't swallow silently either —
        # surface the failure at debug level for troubleshooting.
        logger.debug("Could not determine loaded checkpoint from ComfyUI: %s", e)
    return None
def _ensure_checkpoint_loaded(checkpoint_path):
    """Check if the desired checkpoint is loaded in ComfyUI, and force reload if not.

    Args:
        checkpoint_path: The ``ckpt_name`` the upcoming workflow expects.
            Falsy values make this a no-op.

    Best-effort: any failure is logged at WARNING and otherwise ignored, so
    queuing proceeds even when the probe or the ``/free`` call fails.
    """
    if not checkpoint_path:
        return
    try:
        # Reuse the shared /history probe instead of duplicating its parsing
        # logic here.  A failed lookup returns None, which is treated the same
        # as a mismatch: force the unload so the next prompt reloads.
        if get_loaded_checkpoint() == checkpoint_path:
            logger.info("Checkpoint %s already loaded in ComfyUI", checkpoint_path)
            return
        logger.info("Forcing ComfyUI to unload models to ensure %s loads", checkpoint_path)
        url = current_app.config.get('COMFYUI_URL', 'http://127.0.0.1:8188')
        requests.post(f'{url}/free', json={'unload_models': True}, timeout=5)
    except Exception as e:
        logger.warning("Failed to check/force checkpoint reload: %s", e)
def queue_prompt(prompt_workflow, client_id=None):
    """POST a workflow to ComfyUI's /prompt endpoint.

    Args:
        prompt_workflow: Node-id -> node-spec mapping in ComfyUI API format.
        client_id: Optional ComfyUI client id to attach to the submission.

    Returns:
        The decoded JSON response; contains ``prompt_id`` on acceptance.

    Raises:
        RuntimeError: If ComfyUI answers with a non-2xx status.
    """
    # Make sure the checkpoint this workflow references is the one ComfyUI
    # currently has resident (forces an unload first when it is not).
    checkpoint_path = prompt_workflow.get(NODE_CHECKPOINT, {}).get('inputs', {}).get('ckpt_name')
    _ensure_checkpoint_loaded(checkpoint_path)
    payload = {"prompt": prompt_workflow}
    if client_id:
        payload["client_id"] = client_id
    url = current_app.config['COMFYUI_URL']
    # Log the full request being sent to ComfyUI.
    logger.debug("=" * 80)
    logger.debug("COMFYUI REQUEST - Sending prompt to %s/prompt", url)
    logger.debug("Checkpoint: %s", checkpoint_path)
    logger.debug("Client ID: %s", client_id if client_id else "(none)")
    logger.debug("Full workflow JSON:")
    logger.debug(json.dumps(prompt_workflow, indent=2))
    logger.debug("=" * 80)
    # json= serializes the payload AND sets Content-Type: application/json;
    # the previous manual dumps/encode + data= sent no content type at all.
    response = requests.post(f"{url}/prompt", json=payload, timeout=30)
    if not response.ok:
        logger.error("ComfyUI returned HTTP %s: %s", response.status_code, response.text[:500])
        raise RuntimeError(f"ComfyUI returned HTTP {response.status_code}")
    response_json = response.json()
    # Log the response from ComfyUI.
    logger.debug("COMFYUI RESPONSE - Status: %s", response.status_code)
    logger.debug("Response JSON: %s", json.dumps(response_json, indent=2))
    if 'prompt_id' in response_json:
        logger.info("ComfyUI accepted prompt with ID: %s", response_json['prompt_id'])
    else:
        logger.error("ComfyUI rejected prompt: %s", response_json)
    logger.debug("=" * 80)
    return response_json
def get_history(prompt_id):
    """Poll ComfyUI /history for results of a given prompt_id.

    Args:
        prompt_id: The id returned by ``queue_prompt``.

    Returns:
        The decoded history JSON.  The entry for ``prompt_id`` is absent
        while the job is still running.

    Raises:
        RuntimeError: If ComfyUI answers with a non-2xx status.
    """
    url = current_app.config['COMFYUI_URL']
    response = requests.get(f"{url}/history/{prompt_id}", timeout=10)
    # Validate before .json(): an error page is typically not JSON and would
    # otherwise surface as an opaque decode exception.  Mirrors queue_prompt.
    if not response.ok:
        logger.error("ComfyUI history returned HTTP %s: %s", response.status_code, response.text[:500])
        raise RuntimeError(f"ComfyUI returned HTTP {response.status_code}")
    history_json = response.json()
    # Log detailed history response for debugging.
    if prompt_id in history_json:
        logger.debug("=" * 80)
        logger.debug("COMFYUI HISTORY - Prompt ID: %s", prompt_id)
        logger.debug("Status: %s", response.status_code)
        # Extract key information from the history.
        prompt_data = history_json[prompt_id]
        if 'status' in prompt_data:
            logger.debug("Generation status: %s", prompt_data['status'])
        if 'outputs' in prompt_data:
            logger.debug("Outputs available: %s", list(prompt_data['outputs'].keys()))
            for node_id, output in prompt_data['outputs'].items():
                if 'images' in output:
                    logger.debug("  Node %s produced %d image(s)", node_id, len(output['images']))
                    for img in output['images']:
                        logger.debug("    - %s (subfolder: %s, type: %s)",
                                     img.get('filename'), img.get('subfolder'), img.get('type'))
        logger.debug("Full history response:")
        logger.debug(json.dumps(history_json, indent=2))
        logger.debug("=" * 80)
    else:
        logger.debug("History not yet available for prompt ID: %s", prompt_id)
    return history_json
def get_image(filename, subfolder, folder_type):
    """Retrieve a generated image from ComfyUI's /view endpoint.

    Args:
        filename: Image file name as reported in the job's history outputs.
        subfolder: Subfolder reported alongside the filename (may be '').
        folder_type: ComfyUI folder type for the image (e.g. the 'type'
            field from the history output entry).

    Returns:
        Raw image bytes.

    Raises:
        RuntimeError: If ComfyUI answers with a non-2xx status.
    """
    url = current_app.config['COMFYUI_URL']
    params = {"filename": filename, "subfolder": subfolder, "type": folder_type}
    logger.debug("Fetching image from ComfyUI: filename=%s, subfolder=%s, type=%s",
                 filename, subfolder, folder_type)
    response = requests.get(f"{url}/view", params=params, timeout=30)
    # Without this check an error page's bytes would be returned (and logged)
    # as if they were the image.  Mirrors queue_prompt's validation.
    if not response.ok:
        logger.error("ComfyUI view returned HTTP %s for %s", response.status_code, filename)
        raise RuntimeError(f"ComfyUI returned HTTP {response.status_code}")
    logger.debug("Image retrieved: %d bytes (status: %s)", len(response.content), response.status_code)
    return response.content