Multiple bug fixes.

This commit is contained in:
Aodhan Collins
2026-03-06 19:28:50 +00:00
parent ec08eb5d31
commit d95b81dde5
24 changed files with 601 additions and 133 deletions

View File

@@ -298,10 +298,10 @@ All generation routes use the background job queue. Frontend polls:
Image retrieval is handled server-side by the `_make_finalize()` callback; there are no separate client-facing finalize routes. Image retrieval is handled server-side by the `_make_finalize()` callback; there are no separate client-facing finalize routes.
### Utilities ### Utilities
- `POST /set_default_checkpoint` — save default checkpoint to session - `POST /set_default_checkpoint` — save default checkpoint to session and persist to `comfy_workflow.json`
- `GET /get_missing_{characters,outfits,actions,scenes}` — AJAX: list items without cover images - `GET /get_missing_{characters,outfits,actions,scenes,styles,detailers,looks,checkpoints}` — AJAX: list items without cover images (sorted by display name)
- `POST /generate_missing` — batch generate covers for all characters missing one (uses job queue) - `POST /generate_missing` — batch generate covers for all characters missing one (uses job queue)
- `POST /clear_all_covers` / `clear_all_{outfit,action,scene}_covers` - `POST /clear_all_covers` / `clear_all_{outfit,action,scene,style,detailer,look,checkpoint}_covers`
- `GET /gallery` — global image gallery browsing `static/uploads/` - `GET /gallery` — global image gallery browsing `static/uploads/`
- `GET/POST /settings` — LLM provider configuration - `GET/POST /settings` — LLM provider configuration
- `POST /resource/<category>/<slug>/delete` — soft (JSON only) or hard (JSON + safetensors) delete - `POST /resource/<category>/<slug>/delete` — soft (JSON only) or hard (JSON + safetensors) delete
@@ -321,6 +321,11 @@ Image retrieval is handled server-side by the `_make_finalize()` callback; there
- Context processors inject `all_checkpoints`, `default_checkpoint_path`, and `COMFYUI_WS_URL` into every template. - Context processors inject `all_checkpoints`, `default_checkpoint_path`, and `COMFYUI_WS_URL` into every template.
- **No `{% block head %}` exists** in layout.html — do not try to use it. - **No `{% block head %}` exists** in layout.html — do not try to use it.
- Generation is async: JS submits the form via AJAX (`X-Requested-With: XMLHttpRequest`), receives a `{"job_id": ...}` response, then polls `/api/queue/<job_id>/status` every ~1.5 seconds until `status == "done"`. The server-side worker handles all ComfyUI polling and image saving via the `_make_finalize()` callback. There are no client-facing finalize HTTP routes. - Generation is async: JS submits the form via AJAX (`X-Requested-With: XMLHttpRequest`), receives a `{"job_id": ...}` response, then polls `/api/queue/<job_id>/status` every ~1.5 seconds until `status == "done"`. The server-side worker handles all ComfyUI polling and image saving via the `_make_finalize()` callback. There are no client-facing finalize HTTP routes.
- **Batch generation** (library pages): Uses a two-phase pattern:
1. **Queue phase**: All jobs are submitted upfront via sequential fetch calls, collecting job IDs
2. **Poll phase**: All jobs are polled concurrently via `Promise.all()`, updating UI as each completes
3. **Progress tracking**: Displays currently processing items in real-time using a `Set` to track active jobs
4. **Sorting**: All batch operations sort items by display `name` (not `filename`) for better UX
--- ---

434
app.py
View File

@@ -47,12 +47,16 @@ Session(app)
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Logging # Logging
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
log_level_str = os.environ.get('LOG_LEVEL', 'INFO').upper()
log_level = getattr(logging, log_level_str, logging.INFO)
logging.basicConfig( logging.basicConfig(
level=logging.INFO, level=log_level,
format='%(asctime)s [%(levelname)s] %(name)s: %(message)s', format='%(asctime)s [%(levelname)s] %(name)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S', datefmt='%Y-%m-%d %H:%M:%S',
) )
logger = logging.getLogger('gaze') logger = logging.getLogger('gaze')
logger.setLevel(log_level)
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Generation Job Queue # Generation Job Queue
@@ -117,11 +121,43 @@ def _queue_worker():
with _job_queue_lock: with _job_queue_lock:
job['status'] = 'processing' job['status'] = 'processing'
logger.info("Job started: [%s] %s", job['id'][:8], job['label']) logger.info("=" * 80)
logger.info("JOB STARTED: [%s] %s", job['id'][:8], job['label'])
logger.info("Job created at: %s", time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(job['created_at'])))
# Log workflow summary before sending to ComfyUI
workflow = job['workflow']
logger.info("Workflow summary:")
logger.info(" Checkpoint: %s", workflow.get('4', {}).get('inputs', {}).get('ckpt_name', '(not set)'))
logger.info(" Seed: %s", workflow.get('3', {}).get('inputs', {}).get('seed', '(not set)'))
logger.info(" Resolution: %sx%s",
workflow.get('5', {}).get('inputs', {}).get('width', '?'),
workflow.get('5', {}).get('inputs', {}).get('height', '?'))
logger.info(" Sampler: %s / %s (steps=%s, cfg=%s)",
workflow.get('3', {}).get('inputs', {}).get('sampler_name', '?'),
workflow.get('3', {}).get('inputs', {}).get('scheduler', '?'),
workflow.get('3', {}).get('inputs', {}).get('steps', '?'),
workflow.get('3', {}).get('inputs', {}).get('cfg', '?'))
# Log active LoRAs
active_loras = []
for node_id, label_str in [("16", "char/look"), ("17", "outfit"), ("18", "action"), ("19", "style/detail/scene")]:
if node_id in workflow:
lora_name = workflow[node_id]["inputs"].get("lora_name", "")
if lora_name:
strength = workflow[node_id]["inputs"].get("strength_model", "?")
active_loras.append(f"{label_str}:{lora_name.split('/')[-1]}@{strength}")
logger.info(" Active LoRAs: %s", ' | '.join(active_loras) if active_loras else '(none)')
# Log prompts
logger.info(" Positive prompt: %s", workflow.get('6', {}).get('inputs', {}).get('text', '(not set)')[:200])
logger.info(" Negative prompt: %s", workflow.get('7', {}).get('inputs', {}).get('text', '(not set)')[:200])
logger.info("=" * 80)
try: try:
with app.app_context(): with app.app_context():
# Send workflow to ComfyUI # Send workflow to ComfyUI
logger.info("Sending workflow to ComfyUI...")
prompt_response = queue_prompt(job['workflow']) prompt_response = queue_prompt(job['workflow'])
if 'prompt_id' not in prompt_response: if 'prompt_id' not in prompt_response:
raise Exception(f"ComfyUI rejected job: {prompt_response.get('error', 'unknown error')}") raise Exception(f"ComfyUI rejected job: {prompt_response.get('error', 'unknown error')}")
@@ -134,11 +170,19 @@ def _queue_worker():
# Poll until done (max ~10 minutes) # Poll until done (max ~10 minutes)
max_retries = 300 max_retries = 300
finished = False finished = False
poll_count = 0
logger.info("Polling ComfyUI for completion (max %d retries, 2s interval)...", max_retries)
while max_retries > 0: while max_retries > 0:
history = get_history(comfy_id) history = get_history(comfy_id)
if comfy_id in history: if comfy_id in history:
finished = True finished = True
logger.info("Generation completed after %d polls (%d seconds)",
poll_count, poll_count * 2)
break break
poll_count += 1
if poll_count % 10 == 0: # Log every 20 seconds
logger.info("Still waiting for generation... (%d polls, %d seconds elapsed)",
poll_count, poll_count * 2)
time.sleep(2) time.sleep(2)
max_retries -= 1 max_retries -= 1
@@ -152,10 +196,14 @@ def _queue_worker():
with _job_queue_lock: with _job_queue_lock:
job['status'] = 'done' job['status'] = 'done'
logger.info("Job done: [%s] %s", job['id'][:8], job['label']) logger.info("=" * 80)
logger.info("JOB COMPLETED: [%s] %s", job['id'][:8], job['label'])
logger.info("=" * 80)
except Exception as e: except Exception as e:
logger.exception("Job failed: [%s] %s%s", job['id'][:8], job['label'], e) logger.error("=" * 80)
logger.exception("JOB FAILED: [%s] %s%s", job['id'][:8], job['label'], e)
logger.error("=" * 80)
with _job_queue_lock: with _job_queue_lock:
job['status'] = 'failed' job['status'] = 'failed'
job['error'] = str(e) job['error'] = str(e)
@@ -185,28 +233,59 @@ def _make_finalize(category, slug, db_model_class=None, action=None):
action — 'replace' → update DB; None → always update; anything else → skip action — 'replace' → update DB; None → always update; anything else → skip
""" """
def _finalize(comfy_prompt_id, job): def _finalize(comfy_prompt_id, job):
logger.debug("=" * 80)
logger.debug("FINALIZE - Starting finalization for prompt ID: %s", comfy_prompt_id)
logger.debug("Category: %s, Slug: %s, Action: %s", category, slug, action)
history = get_history(comfy_prompt_id) history = get_history(comfy_prompt_id)
outputs = history[comfy_prompt_id]['outputs'] outputs = history[comfy_prompt_id]['outputs']
for node_output in outputs.values():
logger.debug("Processing outputs from %d node(s)", len(outputs))
for node_id, node_output in outputs.items():
logger.debug(" Node %s: %s", node_id, list(node_output.keys()))
if 'images' in node_output: if 'images' in node_output:
logger.debug(" Found %d image(s) in node %s", len(node_output['images']), node_id)
image_info = node_output['images'][0] image_info = node_output['images'][0]
logger.debug(" Image info: filename=%s, subfolder=%s, type=%s",
image_info['filename'], image_info['subfolder'], image_info['type'])
image_data = get_image(image_info['filename'], image_info['subfolder'], image_info['type']) image_data = get_image(image_info['filename'], image_info['subfolder'], image_info['type'])
folder = os.path.join(app.config['UPLOAD_FOLDER'], f"{category}/{slug}") folder = os.path.join(app.config['UPLOAD_FOLDER'], f"{category}/{slug}")
os.makedirs(folder, exist_ok=True) os.makedirs(folder, exist_ok=True)
filename = f"gen_{int(time.time())}.png" filename = f"gen_{int(time.time())}.png"
with open(os.path.join(folder, filename), 'wb') as f: full_path = os.path.join(folder, filename)
logger.debug(" Saving image to: %s", full_path)
with open(full_path, 'wb') as f:
f.write(image_data) f.write(image_data)
logger.info("Image saved: %s (%d bytes)", full_path, len(image_data))
relative_path = f"{category}/{slug}/{filename}" relative_path = f"{category}/{slug}/{filename}"
job['result'] = { job['result'] = {
'image_url': f'/static/uploads/{relative_path}', 'image_url': f'/static/uploads/{relative_path}',
'relative_path': relative_path, 'relative_path': relative_path,
} }
if db_model_class and (action is None or action == 'replace'): if db_model_class and (action is None or action == 'replace'):
logger.debug(" Updating database: %s.image_path = %s", db_model_class.__name__, relative_path)
obj = db_model_class.query.filter_by(slug=slug).first() obj = db_model_class.query.filter_by(slug=slug).first()
if obj: if obj:
obj.image_path = relative_path obj.image_path = relative_path
db.session.commit() db.session.commit()
logger.debug(" Database updated successfully")
else:
logger.warning(" Object not found in database: %s(slug=%s)", db_model_class.__name__, slug)
else:
logger.debug(" Skipping database update (db_model_class=%s, action=%s)",
db_model_class.__name__ if db_model_class else None, action)
logger.debug("FINALIZE - Completed successfully")
logger.debug("=" * 80)
return return
logger.warning("FINALIZE - No images found in outputs!")
logger.debug("=" * 80)
return _finalize return _finalize
@@ -402,8 +481,38 @@ def inject_default_checkpoint():
@app.route('/set_default_checkpoint', methods=['POST']) @app.route('/set_default_checkpoint', methods=['POST'])
def set_default_checkpoint(): def set_default_checkpoint():
session['default_checkpoint'] = request.form.get('checkpoint_path', '') checkpoint_path = request.form.get('checkpoint_path', '')
session['default_checkpoint'] = checkpoint_path
session.modified = True session.modified = True
# Persist to database Settings so it survives across server restarts
try:
settings = Settings.query.first()
if not settings:
settings = Settings()
db.session.add(settings)
settings.default_checkpoint = checkpoint_path
db.session.commit()
logger.info("Default checkpoint saved to database: %s", checkpoint_path)
except Exception as e:
logger.error(f"Failed to persist checkpoint to database: {e}")
db.session.rollback()
# Also persist to comfy_workflow.json for backwards compatibility
try:
workflow_path = 'comfy_workflow.json'
with open(workflow_path, 'r') as f:
workflow = json.load(f)
# Update node 4 (CheckpointLoaderSimple) with the new checkpoint
if '4' in workflow and 'inputs' in workflow['4']:
workflow['4']['inputs']['ckpt_name'] = checkpoint_path
with open(workflow_path, 'w') as f:
json.dump(workflow, f, indent=2)
except Exception as e:
logger.error(f"Failed to persist checkpoint to workflow file: {e}")
return {'status': 'ok'} return {'status': 'ok'}
@@ -455,6 +564,44 @@ def api_status_mcp():
pass pass
return {'status': 'error'} return {'status': 'error'}
@app.route('/api/status/llm')
def api_status_llm():
"""Return whether the configured LLM provider is reachable."""
try:
settings = Settings.query.first()
if not settings:
return {'status': 'error', 'message': 'Settings not configured'}
is_local = settings.llm_provider != 'openrouter'
if not is_local:
# Check OpenRouter
if not settings.openrouter_api_key:
return {'status': 'error', 'message': 'API key not configured'}
# Try to fetch models list as a lightweight check
headers = {
"Authorization": f"Bearer {settings.openrouter_api_key}",
}
resp = requests.get("https://openrouter.ai/api/v1/models", headers=headers, timeout=5)
if resp.ok:
return {'status': 'ok', 'provider': 'OpenRouter'}
else:
# Check local provider (Ollama or LMStudio)
if not settings.local_base_url:
return {'status': 'error', 'message': 'Base URL not configured'}
# Try to reach the models endpoint
url = f"{settings.local_base_url.rstrip('/')}/models"
resp = requests.get(url, timeout=5)
if resp.ok:
return {'status': 'ok', 'provider': settings.llm_provider.title()}
except Exception as e:
return {'status': 'error', 'message': str(e)}
return {'status': 'error'}
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif', 'webp'} ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif', 'webp'}
_LORA_DEFAULTS = { _LORA_DEFAULTS = {
@@ -622,6 +769,44 @@ def _append_background(prompts, character=None):
bg = f"{primary_color} simple background" if primary_color else "simple background" bg = f"{primary_color} simple background" if primary_color else "simple background"
prompts['main'] = f"{prompts['main']}, {bg}" prompts['main'] = f"{prompts['main']}, {bg}"
def _count_look_assignments():
"""Return a dict mapping look_id to the count of characters it's assigned to."""
# Looks are assigned via the character_id field in the Look model
assignment_counts = {}
looks = Look.query.all()
for look in looks:
if look.character_id:
assignment_counts[look.look_id] = 1 # Each look is assigned to at most one character
else:
assignment_counts[look.look_id] = 0
return assignment_counts
def _count_outfit_lora_assignments():
"""Return a dict mapping outfit LoRA filename to the count of characters using it."""
assignment_counts = {}
characters = Character.query.all()
for character in characters:
# Check character's own LoRA (in case it's actually an outfit LoRA)
char_lora = character.data.get('lora', {}).get('lora_name', '')
if char_lora and 'Clothing' in char_lora:
assignment_counts[char_lora] = assignment_counts.get(char_lora, 0) + 1
# Check all wardrobe outfits for LoRA references
wardrobe = character.data.get('wardrobe', {})
# Handle both nested (new) and flat (legacy) wardrobe formats
if 'default' in wardrobe and isinstance(wardrobe.get('default'), dict):
# New nested format - check each outfit
for outfit_name, outfit_data in wardrobe.items():
if isinstance(outfit_data, dict):
outfit_lora = outfit_data.get('lora', {})
if isinstance(outfit_lora, dict):
lora_name = outfit_lora.get('lora_name', '')
if lora_name:
assignment_counts[lora_name] = assignment_counts.get(lora_name, 0) + 1
return assignment_counts
def build_prompt(data, selected_fields=None, default_fields=None, active_outfit='default'): def build_prompt(data, selected_fields=None, default_fields=None, active_outfit='default'):
def is_selected(section, key): def is_selected(section, key):
# Priority: # Priority:
@@ -745,21 +930,104 @@ def build_prompt(data, selected_fields=None, default_fields=None, active_outfit=
"hand": _dedup_tags(", ".join(hand_parts)) "hand": _dedup_tags(", ".join(hand_parts))
} }
def _ensure_checkpoint_loaded(checkpoint_path):
"""Check if the desired checkpoint is loaded in ComfyUI, and force reload if not."""
if not checkpoint_path:
return
try:
# Get currently loaded checkpoint from ComfyUI history
url = app.config.get('COMFYUI_URL', 'http://127.0.0.1:8188')
resp = requests.get(f'{url}/history', timeout=3)
if resp.ok:
history = resp.json()
if history:
latest = max(history.values(), key=lambda j: j.get('status', {}).get('status_str', ''))
nodes = latest.get('prompt', [None, None, {}])[2]
loaded_ckpt = nodes.get('4', {}).get('inputs', {}).get('ckpt_name')
# If the loaded checkpoint matches what we want, no action needed
if loaded_ckpt == checkpoint_path:
logger.info(f"Checkpoint {checkpoint_path} already loaded in ComfyUI")
return
# Checkpoint doesn't match or couldn't determine - force unload all models
logger.info(f"Forcing ComfyUI to unload models to ensure {checkpoint_path} loads")
requests.post(f'{url}/free', json={'unload_models': True}, timeout=5)
except Exception as e:
logger.warning(f"Failed to check/force checkpoint reload: {e}")
def queue_prompt(prompt_workflow, client_id=None): def queue_prompt(prompt_workflow, client_id=None):
# Ensure the checkpoint in the workflow is loaded in ComfyUI
checkpoint_path = prompt_workflow.get('4', {}).get('inputs', {}).get('ckpt_name')
_ensure_checkpoint_loaded(checkpoint_path)
p = {"prompt": prompt_workflow} p = {"prompt": prompt_workflow}
if client_id: if client_id:
p["client_id"] = client_id p["client_id"] = client_id
# Log the full request being sent to ComfyUI
logger.debug("=" * 80)
logger.debug("COMFYUI REQUEST - Sending prompt to %s/prompt", app.config['COMFYUI_URL'])
logger.debug("Checkpoint: %s", checkpoint_path)
logger.debug("Client ID: %s", client_id if client_id else "(none)")
logger.debug("Full workflow JSON:")
logger.debug(json.dumps(prompt_workflow, indent=2))
logger.debug("=" * 80)
data = json.dumps(p).encode('utf-8') data = json.dumps(p).encode('utf-8')
response = requests.post(f"{app.config['COMFYUI_URL']}/prompt", data=data) response = requests.post(f"{app.config['COMFYUI_URL']}/prompt", data=data)
return response.json() response_json = response.json()
# Log the response from ComfyUI
logger.debug("COMFYUI RESPONSE - Status: %s", response.status_code)
logger.debug("Response JSON: %s", json.dumps(response_json, indent=2))
if 'prompt_id' in response_json:
logger.info("ComfyUI accepted prompt with ID: %s", response_json['prompt_id'])
else:
logger.error("ComfyUI rejected prompt: %s", response_json)
logger.debug("=" * 80)
return response_json
def get_history(prompt_id): def get_history(prompt_id):
response = requests.get(f"{app.config['COMFYUI_URL']}/history/{prompt_id}") response = requests.get(f"{app.config['COMFYUI_URL']}/history/{prompt_id}")
return response.json() history_json = response.json()
# Log detailed history response for debugging
if prompt_id in history_json:
logger.debug("=" * 80)
logger.debug("COMFYUI HISTORY - Prompt ID: %s", prompt_id)
logger.debug("Status: %s", response.status_code)
# Extract key information from the history
prompt_data = history_json[prompt_id]
if 'status' in prompt_data:
logger.debug("Generation status: %s", prompt_data['status'])
if 'outputs' in prompt_data:
logger.debug("Outputs available: %s", list(prompt_data['outputs'].keys()))
for node_id, output in prompt_data['outputs'].items():
if 'images' in output:
logger.debug(" Node %s produced %d image(s)", node_id, len(output['images']))
for img in output['images']:
logger.debug(" - %s (subfolder: %s, type: %s)",
img.get('filename'), img.get('subfolder'), img.get('type'))
logger.debug("Full history response:")
logger.debug(json.dumps(history_json, indent=2))
logger.debug("=" * 80)
else:
logger.debug("History not yet available for prompt ID: %s", prompt_id)
return history_json
def get_image(filename, subfolder, folder_type): def get_image(filename, subfolder, folder_type):
data = {"filename": filename, "subfolder": subfolder, "type": folder_type} data = {"filename": filename, "subfolder": subfolder, "type": folder_type}
logger.debug("Fetching image from ComfyUI: filename=%s, subfolder=%s, type=%s",
filename, subfolder, folder_type)
response = requests.get(f"{app.config['COMFYUI_URL']}/view", params=data) response = requests.get(f"{app.config['COMFYUI_URL']}/view", params=data)
logger.debug("Image retrieved: %d bytes (status: %s)", len(response.content), response.status_code)
return response.content return response.content
from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.orm.attributes import flag_modified
@@ -2218,30 +2486,87 @@ def _log_workflow_prompts(label, workflow):
"""Log the final assembled ComfyUI prompts in a consistent, readable block.""" """Log the final assembled ComfyUI prompts in a consistent, readable block."""
sep = "=" * 72 sep = "=" * 72
active_loras = [] active_loras = []
lora_details = []
# Collect detailed LoRA information
for node_id, label_str in [("16", "char/look"), ("17", "outfit"), ("18", "action"), ("19", "style/detail/scene")]: for node_id, label_str in [("16", "char/look"), ("17", "outfit"), ("18", "action"), ("19", "style/detail/scene")]:
if node_id in workflow: if node_id in workflow:
name = workflow[node_id]["inputs"].get("lora_name", "") name = workflow[node_id]["inputs"].get("lora_name", "")
if name: if name:
w = workflow[node_id]["inputs"].get("strength_model", "?") strength_model = workflow[node_id]["inputs"].get("strength_model", "?")
active_loras.append(f"{label_str}:{name.split('/')[-1]}@{w:.3f}" if isinstance(w, float) else f"{label_str}:{name.split('/')[-1]}@{w}") strength_clip = workflow[node_id]["inputs"].get("strength_clip", "?")
# Short version for summary
if isinstance(strength_model, float):
active_loras.append(f"{label_str}:{name.split('/')[-1]}@{strength_model:.3f}")
else:
active_loras.append(f"{label_str}:{name.split('/')[-1]}@{strength_model}")
# Detailed version
lora_details.append(f" Node {node_id} ({label_str}): {name}")
lora_details.append(f" strength_model={strength_model}, strength_clip={strength_clip}")
# Extract VAE information
vae_info = "(integrated)"
if '21' in workflow:
vae_info = workflow['21']['inputs'].get('vae_name', '(custom)')
# Extract adetailer information
adetailer_info = []
for node_id, node_name in [("11", "Face"), ("13", "Hand")]:
if node_id in workflow:
adetailer_info.append(f" {node_name} (Node {node_id}): steps={workflow[node_id]['inputs'].get('steps', '?')}, "
f"cfg={workflow[node_id]['inputs'].get('cfg', '?')}, "
f"denoise={workflow[node_id]['inputs'].get('denoise', '?')}")
face_text = workflow.get('14', {}).get('inputs', {}).get('text', '') face_text = workflow.get('14', {}).get('inputs', {}).get('text', '')
hand_text = workflow.get('15', {}).get('inputs', {}).get('text', '') hand_text = workflow.get('15', {}).get('inputs', {}).get('text', '')
lines = [ lines = [
sep, sep,
f" WORKFLOW PROMPTS [{label}]", f" WORKFLOW PROMPTS [{label}]",
sep, sep,
f" Checkpoint : {workflow['4']['inputs'].get('ckpt_name', '(not set)')}", " MODEL CONFIGURATION:",
f" Seed : {workflow['3']['inputs'].get('seed', '(not set)')}", f" Checkpoint : {workflow['4']['inputs'].get('ckpt_name', '(not set)')}",
f" Resolution : {workflow['5']['inputs'].get('width', '?')} x {workflow['5']['inputs'].get('height', '?')}", f" VAE : {vae_info}",
f" Sampler : {workflow['3']['inputs'].get('sampler_name', '?')} / {workflow['3']['inputs'].get('scheduler', '?')} steps={workflow['3']['inputs'].get('steps', '?')} cfg={workflow['3']['inputs'].get('cfg', '?')}", "",
f" LoRAs : {' | '.join(active_loras) if active_loras else '(none)'}", " GENERATION SETTINGS:",
f" [+] Positive : {workflow['6']['inputs'].get('text', '')}", f" Seed : {workflow['3']['inputs'].get('seed', '(not set)')}",
f" [-] Negative : {workflow['7']['inputs'].get('text', '')}", f" Resolution : {workflow['5']['inputs'].get('width', '?')} x {workflow['5']['inputs'].get('height', '?')}",
f" Sampler : {workflow['3']['inputs'].get('sampler_name', '?')} / {workflow['3']['inputs'].get('scheduler', '?')}",
f" Steps : {workflow['3']['inputs'].get('steps', '?')}",
f" CFG Scale : {workflow['3']['inputs'].get('cfg', '?')}",
f" Denoise : {workflow['3']['inputs'].get('denoise', '1.0')}",
] ]
# Add LoRA details
if active_loras:
lines.append("")
lines.append(" LORA CONFIGURATION:")
lines.extend(lora_details)
else:
lines.append("")
lines.append(" LORA CONFIGURATION: (none)")
# Add adetailer details
if adetailer_info:
lines.append("")
lines.append(" ADETAILER CONFIGURATION:")
lines.extend(adetailer_info)
# Add prompts
lines.extend([
"",
" PROMPTS:",
f" [+] Positive : {workflow['6']['inputs'].get('text', '')}",
f" [-] Negative : {workflow['7']['inputs'].get('text', '')}",
])
if face_text: if face_text:
lines.append(f" [F] Face : {face_text}") lines.append(f" [F] Face : {face_text}")
if hand_text: if hand_text:
lines.append(f" [H] Hand : {hand_text}") lines.append(f" [H] Hand : {hand_text}")
lines.append(sep) lines.append(sep)
logger.info("\n%s", "\n".join(lines)) logger.info("\n%s", "\n".join(lines))
@@ -2258,9 +2583,16 @@ def _prepare_workflow(workflow, character, prompts, checkpoint=None, custom_nega
if "15" in workflow: if "15" in workflow:
workflow["15"]["inputs"]["text"] = workflow["15"]["inputs"]["text"].replace("{{HAND_PROMPT}}", prompts["hand"]) workflow["15"]["inputs"]["text"] = workflow["15"]["inputs"]["text"].replace("{{HAND_PROMPT}}", prompts["hand"])
# 2. Update Checkpoint # 2. Update Checkpoint - always set one, fall back to default if not provided
if not checkpoint:
default_ckpt, default_ckpt_data = _get_default_checkpoint()
checkpoint = default_ckpt
if not checkpoint_data:
checkpoint_data = default_ckpt_data
if checkpoint: if checkpoint:
workflow["4"]["inputs"]["ckpt_name"] = checkpoint workflow["4"]["inputs"]["ckpt_name"] = checkpoint
else:
raise ValueError("No checkpoint specified and no default checkpoint configured")
# 3. Handle LoRAs - Node 16 for character, Node 17 for outfit, Node 18 for action, Node 19 for style/detailer # 3. Handle LoRAs - Node 16 for character, Node 17 for outfit, Node 18 for action, Node 19 for style/detailer
# Start with direct checkpoint connections # Start with direct checkpoint connections
@@ -2394,18 +2726,38 @@ def _prepare_workflow(workflow, character, prompts, checkpoint=None, custom_nega
return workflow return workflow
def _get_default_checkpoint(): def _get_default_checkpoint():
"""Return (checkpoint_path, checkpoint_data) from the session default, or (None, None).""" """Return (checkpoint_path, checkpoint_data) from the database Settings, session, or fall back to workflow file."""
ckpt_path = session.get('default_checkpoint') ckpt_path = session.get('default_checkpoint')
# If no session checkpoint, try to read from database Settings
if not ckpt_path:
settings = Settings.query.first()
if settings and settings.default_checkpoint:
ckpt_path = settings.default_checkpoint
logger.debug("Loaded default checkpoint from database: %s", ckpt_path)
# If still no checkpoint, try to read from the workflow file
if not ckpt_path:
try:
with open('comfy_workflow.json', 'r') as f:
workflow = json.load(f)
ckpt_path = workflow.get('4', {}).get('inputs', {}).get('ckpt_name')
logger.debug("Loaded default checkpoint from workflow file: %s", ckpt_path)
except Exception:
pass
if not ckpt_path: if not ckpt_path:
return None, None return None, None
ckpt = Checkpoint.query.filter_by(checkpoint_path=ckpt_path).first() ckpt = Checkpoint.query.filter_by(checkpoint_path=ckpt_path).first()
if not ckpt: if not ckpt:
return None, None # Checkpoint path exists but not in DB - return path with empty data
return ckpt_path, {}
return ckpt.checkpoint_path, ckpt.data or {} return ckpt.checkpoint_path, ckpt.data or {}
@app.route('/get_missing_characters') @app.route('/get_missing_characters')
def get_missing_characters(): def get_missing_characters():
missing = Character.query.filter((Character.image_path == None) | (Character.image_path == '')).order_by(Character.filename).all() missing = Character.query.filter((Character.image_path == None) | (Character.image_path == '')).order_by(Character.name).all()
return {'missing': [{'slug': c.slug, 'name': c.name} for c in missing]} return {'missing': [{'slug': c.slug, 'name': c.name} for c in missing]}
@app.route('/clear_all_covers', methods=['POST']) @app.route('/clear_all_covers', methods=['POST'])
@@ -2420,7 +2772,7 @@ def clear_all_covers():
def generate_missing(): def generate_missing():
missing = Character.query.filter( missing = Character.query.filter(
(Character.image_path == None) | (Character.image_path == '') (Character.image_path == None) | (Character.image_path == '')
).order_by(Character.filename).all() ).order_by(Character.name).all()
if not missing: if not missing:
flash("No characters missing cover images.") flash("No characters missing cover images.")
@@ -2490,7 +2842,7 @@ def save_defaults(slug):
@app.route('/get_missing_outfits') @app.route('/get_missing_outfits')
def get_missing_outfits(): def get_missing_outfits():
missing = Outfit.query.filter((Outfit.image_path == None) | (Outfit.image_path == '')).order_by(Outfit.filename).all() missing = Outfit.query.filter((Outfit.image_path == None) | (Outfit.image_path == '')).order_by(Outfit.name).all()
return {'missing': [{'slug': o.slug, 'name': o.name} for o in missing]} return {'missing': [{'slug': o.slug, 'name': o.name} for o in missing]}
@app.route('/clear_all_outfit_covers', methods=['POST']) @app.route('/clear_all_outfit_covers', methods=['POST'])
@@ -2503,7 +2855,7 @@ def clear_all_outfit_covers():
@app.route('/get_missing_actions') @app.route('/get_missing_actions')
def get_missing_actions(): def get_missing_actions():
missing = Action.query.filter((Action.image_path == None) | (Action.image_path == '')).order_by(Action.filename).all() missing = Action.query.filter((Action.image_path == None) | (Action.image_path == '')).order_by(Action.name).all()
return {'missing': [{'slug': a.slug, 'name': a.name} for a in missing]} return {'missing': [{'slug': a.slug, 'name': a.name} for a in missing]}
@app.route('/clear_all_action_covers', methods=['POST']) @app.route('/clear_all_action_covers', methods=['POST'])
@@ -2516,7 +2868,7 @@ def clear_all_action_covers():
@app.route('/get_missing_scenes') @app.route('/get_missing_scenes')
def get_missing_scenes(): def get_missing_scenes():
missing = Scene.query.filter((Scene.image_path == None) | (Scene.image_path == '')).order_by(Scene.filename).all() missing = Scene.query.filter((Scene.image_path == None) | (Scene.image_path == '')).order_by(Scene.name).all()
return {'missing': [{'slug': s.slug, 'name': s.name} for s in missing]} return {'missing': [{'slug': s.slug, 'name': s.name} for s in missing]}
@app.route('/clear_all_scene_covers', methods=['POST']) @app.route('/clear_all_scene_covers', methods=['POST'])
@@ -2947,7 +3299,8 @@ def get_missing_presets():
@app.route('/outfits') @app.route('/outfits')
def outfits_index(): def outfits_index():
outfits = Outfit.query.order_by(Outfit.name).all() outfits = Outfit.query.order_by(Outfit.name).all()
return render_template('outfits/index.html', outfits=outfits) lora_assignments = _count_outfit_lora_assignments()
return render_template('outfits/index.html', outfits=outfits, lora_assignments=lora_assignments)
@app.route('/outfits/rescan', methods=['POST']) @app.route('/outfits/rescan', methods=['POST'])
def rescan_outfits(): def rescan_outfits():
@@ -4293,12 +4646,12 @@ def replace_style_cover_from_preview(slug):
@app.route('/get_missing_styles') @app.route('/get_missing_styles')
def get_missing_styles(): def get_missing_styles():
missing = Style.query.filter((Style.image_path == None) | (Style.image_path == '')).order_by(Style.filename).all() missing = Style.query.filter((Style.image_path == None) | (Style.image_path == '')).order_by(Style.name).all()
return {'missing': [{'slug': s.slug, 'name': s.name} for s in missing]} return {'missing': [{'slug': s.slug, 'name': s.name} for s in missing]}
@app.route('/get_missing_detailers') @app.route('/get_missing_detailers')
def get_missing_detailers(): def get_missing_detailers():
missing = Detailer.query.filter((Detailer.image_path == None) | (Detailer.image_path == '')).order_by(Detailer.filename).all() missing = Detailer.query.filter((Detailer.image_path == None) | (Detailer.image_path == '')).order_by(Detailer.name).all()
return {'missing': [{'slug': d.slug, 'name': d.name} for d in missing]} return {'missing': [{'slug': d.slug, 'name': d.name} for d in missing]}
@app.route('/clear_all_detailer_covers', methods=['POST']) @app.route('/clear_all_detailer_covers', methods=['POST'])
@@ -4321,7 +4674,7 @@ def clear_all_style_covers():
def generate_missing_styles(): def generate_missing_styles():
missing = Style.query.filter( missing = Style.query.filter(
(Style.image_path == None) | (Style.image_path == '') (Style.image_path == None) | (Style.image_path == '')
).order_by(Style.filename).all() ).order_by(Style.name).all()
if not missing: if not missing:
flash("No styles missing cover images.") flash("No styles missing cover images.")
@@ -5673,7 +6026,7 @@ def save_checkpoint_json(slug):
@app.route('/get_missing_checkpoints') @app.route('/get_missing_checkpoints')
def get_missing_checkpoints(): def get_missing_checkpoints():
missing = Checkpoint.query.filter((Checkpoint.image_path == None) | (Checkpoint.image_path == '')).order_by(Checkpoint.checkpoint_path).all() missing = Checkpoint.query.filter((Checkpoint.image_path == None) | (Checkpoint.image_path == '')).order_by(Checkpoint.name).all()
return {'missing': [{'slug': c.slug, 'name': c.name} for c in missing]} return {'missing': [{'slug': c.slug, 'name': c.name} for c in missing]}
@app.route('/clear_all_checkpoint_covers', methods=['POST']) @app.route('/clear_all_checkpoint_covers', methods=['POST'])
@@ -5790,7 +6143,8 @@ def bulk_create_checkpoints():
@app.route('/looks') @app.route('/looks')
def looks_index(): def looks_index():
looks = Look.query.order_by(Look.name).all() looks = Look.query.order_by(Look.name).all()
return render_template('looks/index.html', looks=looks) look_assignments = _count_look_assignments()
return render_template('looks/index.html', looks=looks, look_assignments=look_assignments)
@app.route('/looks/rescan', methods=['POST']) @app.route('/looks/rescan', methods=['POST'])
def rescan_looks(): def rescan_looks():
@@ -6036,7 +6390,7 @@ def create_look():
@app.route('/get_missing_looks') @app.route('/get_missing_looks')
def get_missing_looks(): def get_missing_looks():
missing = Look.query.filter((Look.image_path == None) | (Look.image_path == '')).order_by(Look.filename).all() missing = Look.query.filter((Look.image_path == None) | (Look.image_path == '')).order_by(Look.name).all()
return {'missing': [{'slug': l.slug, 'name': l.name} for l in missing]} return {'missing': [{'slug': l.slug, 'name': l.name} for l in missing]}
@app.route('/clear_all_look_covers', methods=['POST']) @app.route('/clear_all_look_covers', methods=['POST'])
@@ -6932,6 +7286,7 @@ if __name__ == '__main__':
('lora_dir_scenes', "VARCHAR(500) DEFAULT '/ImageModels/lora/Illustrious/Backgrounds'"), ('lora_dir_scenes', "VARCHAR(500) DEFAULT '/ImageModels/lora/Illustrious/Backgrounds'"),
('lora_dir_detailers', "VARCHAR(500) DEFAULT '/ImageModels/lora/Illustrious/Detailers'"), ('lora_dir_detailers', "VARCHAR(500) DEFAULT '/ImageModels/lora/Illustrious/Detailers'"),
('checkpoint_dirs', "VARCHAR(1000) DEFAULT '/ImageModels/Stable-diffusion/Illustrious,/ImageModels/Stable-diffusion/Noob'"), ('checkpoint_dirs', "VARCHAR(1000) DEFAULT '/ImageModels/Stable-diffusion/Illustrious,/ImageModels/Stable-diffusion/Noob'"),
('default_checkpoint', "VARCHAR(500)"),
] ]
for col_name, col_type in columns_to_add: for col_name, col_type in columns_to_add:
try: try:
@@ -6950,6 +7305,15 @@ if __name__ == '__main__':
db.session.commit() db.session.commit()
print("Created default settings") print("Created default settings")
# Log the default checkpoint on startup
settings = Settings.query.first()
if settings and settings.default_checkpoint:
logger.info("=" * 80)
logger.info("DEFAULT CHECKPOINT loaded from database: %s", settings.default_checkpoint)
logger.info("=" * 80)
else:
logger.info("No default checkpoint set in database")
sync_characters() sync_characters()
sync_outfits() sync_outfits()
sync_actions() sync_actions()

View File

@@ -16,7 +16,7 @@
}, },
"4": { "4": {
"inputs": { "inputs": {
"ckpt_name": "Noob/oneObsession_v19Atypical.safetensors" "ckpt_name": ""
}, },
"class_type": "CheckpointLoaderSimple" "class_type": "CheckpointLoaderSimple"
}, },

View File

@@ -1,6 +1,6 @@
{ {
"character_id": "delinquent_mother_flim13", "character_id": "delinquent_mother_flim13",
"character_name": "Delinquent Mother", "character_name": "Gyaru Mother",
"identity": { "identity": {
"base_specs": "1girl, milf, gyaru, tall", "base_specs": "1girl, milf, gyaru, tall",
"hair": "blonde hair, long hair", "hair": "blonde hair, long hair",
@@ -47,4 +47,4 @@
"Original", "Original",
"flim13" "flim13"
] ]
} }

View File

@@ -22,7 +22,7 @@
"default": { "default": {
"full_body": "", "full_body": "",
"headwear": "", "headwear": "",
"top": "black crop top, blue and silver motorcycle jacket", "top": "black crop top, blue and silver jacket",
"bottom": "black leather pants", "bottom": "black leather pants",
"legwear": "", "legwear": "",
"footwear": "blue sneakers", "footwear": "blue sneakers",
@@ -49,4 +49,4 @@
"KDA", "KDA",
"K-Pop" "K-Pop"
] ]
} }

View File

@@ -1,11 +1,11 @@
{ {
"checkpoint_path": "Illustrious/zukiNewCuteILL_newV20.safetensors",
"checkpoint_name": "zukiNewCuteILL_newV20.safetensors",
"base_positive": "anime",
"base_negative": "text, logo", "base_negative": "text, logo",
"steps": 25, "base_positive": "anime, cute, loli, moe",
"cfg": 5, "cfg": 5,
"checkpoint_name": "zukiNewCuteILL_newV20.safetensors",
"checkpoint_path": "Illustrious/zukiNewCuteILL_newV20.safetensors",
"sampler_name": "euler_ancestral",
"scheduler": "normal", "scheduler": "normal",
"sampler_name": "euler_ancestral", "steps": 25,
"vae": "integrated" "vae": "integrated"
} }

View File

@@ -1,23 +1,13 @@
{ {
"outfit_id": "golddripnunchaindresslingerieill",
"outfit_name": "Golddripnunchaindresslingerieill",
"wardrobe": {
"full_body": "revealing nun dress with gold drip accents",
"headwear": "nun veil, jewelry",
"top": "lingerie top, gold chains",
"bottom": "skirt, gold trim",
"legwear": "thighhighs, garter straps",
"footwear": "heels",
"hands": "",
"accessories": "gold chains, cross necklace, body chain"
},
"lora": { "lora": {
"lora_name": "Illustrious/Clothing/GoldDripNunChainDressLingerieILL.safetensors", "lora_name": "",
"lora_triggers": "",
"lora_weight": 0.8, "lora_weight": 0.8,
"lora_triggers": "GoldDripNunChainDressLingerieILL", "lora_weight_max": 0.8,
"lora_weight_min": 0.8, "lora_weight_min": 0.8
"lora_weight_max": 0.8
}, },
"outfit_id": "golddripnunchaindresslingerieill",
"outfit_name": "Nun (with Gold)",
"tags": [ "tags": [
"nun", "nun",
"veil", "veil",
@@ -33,5 +23,15 @@
"dripping", "dripping",
"gold", "gold",
"body_chain" "body_chain"
] ],
} "wardrobe": {
"accessories": "gold chains, cross necklace, body chain",
"bottom": "skirt, gold trim",
"footwear": "heels",
"full_body": "revealing nun dress with gold drip accents",
"hands": "",
"headwear": "nun veil, jewelry",
"legwear": "thighhighs, garter straps",
"top": "lingerie top, gold chains"
}
}

View File

@@ -1,9 +1,7 @@
{ {
"character_id": null,
"look_id": "jn_tron_bonne_illus", "look_id": "jn_tron_bonne_illus",
"look_name": "Jn Tron Bonne Illus", "look_name": "Jn Tron Bonne Illus",
"character_id": "",
"positive": "tron_bonne_(mega_man), brown_hair, short_hair, spiked_hair, goggles_on_head, pink_jacket, crop_top, midriff, navel, skull_print, pink_shorts, boots, large_earrings, servbot_(mega_man)",
"negative": "pubic hair, 3d, realistic, loli, censored, bad anatomy, sketch, monochrome",
"lora": { "lora": {
"lora_name": "Illustrious/Looks/JN_Tron_Bonne_Illus.safetensors", "lora_name": "Illustrious/Looks/JN_Tron_Bonne_Illus.safetensors",
"lora_weight": 0.8, "lora_weight": 0.8,
@@ -11,6 +9,8 @@
"lora_weight_min": 0.8, "lora_weight_min": 0.8,
"lora_weight_max": 0.8 "lora_weight_max": 0.8
}, },
"negative": "pubic hair, 3d, realistic, loli, censored, bad anatomy, sketch, monochrome",
"positive": "tron_bonne_(mega_man), brown_hair, short_hair, spiked_hair, purple cropped jacket, pantyhose, metal panties, short pink dress, boots, skull_earrings, servbot_(mega_man)",
"tags": [ "tags": [
"tron_bonne_(mega_man)", "tron_bonne_(mega_man)",
"goggles_on_head", "goggles_on_head",
@@ -26,4 +26,4 @@
"brown_hair", "brown_hair",
"short_hair" "short_hair"
] ]
} }

View File

@@ -55,7 +55,7 @@
} }
}, },
"style": { "style": {
"style_id": null, "style_id": "random",
"use_lora": true "use_lora": true
}, },
"scene": { "scene": {
@@ -81,4 +81,4 @@
"checkpoint_path": null "checkpoint_path": null
}, },
"tags": [] "tags": []
} }

View File

@@ -2,8 +2,8 @@
"style_id": "7b_style", "style_id": "7b_style",
"style_name": "7B Dream", "style_name": "7B Dream",
"style": { "style": {
"artist_name": "7b_Dream", "artist_name": "7b",
"artistic_style": "3d" "artistic_style": "3d, blender, semi-realistic"
}, },
"lora": { "lora": {
"lora_name": "Illustrious/Styles/7b-style.safetensors", "lora_name": "Illustrious/Styles/7b-style.safetensors",
@@ -12,4 +12,4 @@
"lora_weight_min": 1.0, "lora_weight_min": 1.0,
"lora_weight_max": 1.0 "lora_weight_max": 1.0
} }
} }

View File

@@ -1,15 +0,0 @@
{
"style_id": "bckiwi_3d_style_il_2_7_rank16_fp16",
"style_name": "Bckiwi 3D Style Il 2 7 Rank16 Fp16",
"style": {
"artist_name": "",
"artistic_style": ""
},
"lora": {
"lora_name": "Illustrious/Styles/BCkiwi_3D_style_IL_2.7_rank16_fp16.safetensors",
"lora_weight": 1.0,
"lora_triggers": "BCkiwi_3D_style_IL_2.7_rank16_fp16",
"lora_weight_min": 1.0,
"lora_weight_max": 1.0
}
}

View File

@@ -13,6 +13,9 @@ services:
# ComfyUI runs on the Docker host # ComfyUI runs on the Docker host
COMFYUI_URL: http://10.0.0.200:8188 # Compose manages danbooru-mcp — skip the app's auto-start logic COMFYUI_URL: http://10.0.0.200:8188 # Compose manages danbooru-mcp — skip the app's auto-start logic
SKIP_MCP_AUTOSTART: "true" SKIP_MCP_AUTOSTART: "true"
# Enable debug logging
FLASK_DEBUG: "1"
LOG_LEVEL: "DEBUG"
volumes: volumes:
# Persistent data # Persistent data
- ./data:/app/data - ./data:/app/data

0
launch.sh Normal file → Executable file
View File

View File

@@ -154,6 +154,8 @@ class Settings(db.Model):
lora_dir_detailers = db.Column(db.String(500), default='/ImageModels/lora/Illustrious/Detailers') lora_dir_detailers = db.Column(db.String(500), default='/ImageModels/lora/Illustrious/Detailers')
# Checkpoint scan directories (comma-separated list of absolute paths) # Checkpoint scan directories (comma-separated list of absolute paths)
checkpoint_dirs = db.Column(db.String(1000), default='/ImageModels/Stable-diffusion/Illustrious,/ImageModels/Stable-diffusion/Noob') checkpoint_dirs = db.Column(db.String(1000), default='/ImageModels/Stable-diffusion/Illustrious,/ImageModels/Stable-diffusion/Noob')
# Default checkpoint path (persisted across server restarts)
default_checkpoint = db.Column(db.String(500), nullable=True)
def __repr__(self): def __repr__(self):
return '<Settings>' return '<Settings>'

View File

@@ -356,6 +356,34 @@ h5, h6 { color: var(--text); }
object-fit: cover; object-fit: cover;
} }
/* Assignment badge — shows count of characters using this resource */
.assignment-badge {
position: absolute;
top: 8px;
right: 8px;
background: linear-gradient(135deg, var(--accent) 0%, var(--accent-dim) 100%);
color: #fff;
font-size: 0.7rem;
font-weight: 700;
min-width: 22px;
height: 22px;
border-radius: 11px;
display: flex;
align-items: center;
justify-content: center;
padding: 0 6px;
line-height: 1;
z-index: 2;
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.4);
border: 1.5px solid rgba(255, 255, 255, 0.15);
transition: transform 0.2s ease, box-shadow 0.2s ease;
}
.character-card:hover .assignment-badge {
transform: scale(1.1);
box-shadow: 0 4px 12px rgba(139, 126, 255, 0.5);
}
/* Generator result container */ /* Generator result container */
#result-container { #result-container {
background-color: var(--bg-raised) !important; background-color: var(--bg-raised) !important;

View File

@@ -160,7 +160,10 @@
statusText.textContent = `0 / ${jobs.length} done`; statusText.textContent = `0 / ${jobs.length} done`;
let completed = 0; let completed = 0;
let currentItem = '';
await Promise.all(jobs.map(async ({ item, jobId }) => { await Promise.all(jobs.map(async ({ item, jobId }) => {
currentItem = item.name;
itemNameText.textContent = `Processing: ${currentItem}`;
try { try {
const jobResult = await waitForJob(jobId); const jobResult = await waitForJob(jobId);
if (jobResult.result && jobResult.result.image_url) { if (jobResult.result && jobResult.result.image_url) {

View File

@@ -115,53 +115,68 @@
regenAllBtn.disabled = true; regenAllBtn.disabled = true;
container.classList.remove('d-none'); container.classList.remove('d-none');
let completed = 0; // Phase 1: Queue all jobs upfront
for (const ckpt of missing) { progressBar.style.width = '100%';
const percent = Math.round((completed / missing.length) * 100); progressBar.textContent = '';
progressBar.style.width = `${percent}%`; progressBar.classList.add('progress-bar-striped', 'progress-bar-animated');
progressBar.textContent = `${percent}%`; nodeStatus.textContent = 'Queuing…';
statusText.textContent = `Batch Generating: ${completed + 1} / ${missing.length}`;
ckptNameText.textContent = `Current: ${ckpt.name}`;
nodeStatus.textContent = 'Queuing…';
taskProgressBar.style.width = '100%';
taskProgressBar.textContent = '';
taskProgressBar.classList.add('progress-bar-striped', 'progress-bar-animated');
const jobs = [];
for (const ckpt of missing) {
statusText.textContent = `Queuing ${jobs.length + 1} / ${missing.length}`;
try { try {
const genResp = await fetch(`/checkpoint/${ckpt.slug}/generate`, { const genResp = await fetch(`/checkpoint/${ckpt.slug}/generate`, {
method: 'POST', method: 'POST',
body: new URLSearchParams({ 'character_slug': '__random__' }), body: new URLSearchParams({ character_slug: '__random__' }),
headers: { 'X-Requested-With': 'XMLHttpRequest' } headers: { 'X-Requested-With': 'XMLHttpRequest' }
}); });
const genData = await genResp.json(); const genData = await genResp.json();
currentJobId = genData.job_id; if (genData.job_id) jobs.push({ item: ckpt, jobId: genData.job_id });
} catch (err) {
console.error(`Failed to queue ${ckpt.name}:`, err);
}
}
const jobResult = await waitForJob(currentJobId); // Phase 2: Poll all concurrently
currentJobId = null; progressBar.classList.remove('progress-bar-striped', 'progress-bar-animated');
progressBar.style.width = '0%';
progressBar.textContent = '0%';
statusText.textContent = `0 / ${jobs.length} done`;
let completed = 0;
let currentItem = '';
await Promise.all(jobs.map(async ({ item, jobId }) => {
currentItem = item.name;
ckptNameText.textContent = `Processing: ${currentItem}`;
try {
const jobResult = await waitForJob(jobId);
if (jobResult.result && jobResult.result.image_url) { if (jobResult.result && jobResult.result.image_url) {
const img = document.getElementById(`img-${ckpt.slug}`); const img = document.getElementById(`img-${item.slug}`);
const noImgSpan = document.getElementById(`no-img-${ckpt.slug}`); const noImgSpan = document.getElementById(`no-img-${item.slug}`);
if (img) { img.src = jobResult.result.image_url; img.classList.remove('d-none'); } if (img) { img.src = jobResult.result.image_url; img.classList.remove('d-none'); }
if (noImgSpan) noImgSpan.classList.add('d-none'); if (noImgSpan) noImgSpan.classList.add('d-none');
} }
} catch (err) { } catch (err) {
console.error(`Failed for ${ckpt.name}:`, err); console.error(`Failed for ${item.name}:`, err);
currentJobId = null;
} }
completed++; completed++;
} const pct = Math.round((completed / jobs.length) * 100);
progressBar.style.width = `${pct}%`;
progressBar.textContent = `${pct}%`;
statusText.textContent = `${completed} / ${jobs.length} done`;
}));
progressBar.style.width = '100%'; progressBar.style.width = '100%';
progressBar.textContent = '100%'; progressBar.textContent = '100%';
statusText.textContent = 'Batch Generation Complete!'; statusText.textContent = 'Batch Checkpoint Generation Complete!';
ckptNameText.textContent = ''; ckptNameText.textContent = '';
nodeStatus.textContent = 'Done'; nodeStatus.textContent = 'Done';
stepProgressText.textContent = '';
taskProgressBar.style.width = '0%'; taskProgressBar.style.width = '0%';
taskProgressBar.textContent = ''; taskProgressBar.textContent = '';
batchBtn.disabled = false; batchBtn.disabled = false;
regenAllBtn.disabled = false; regenAllBtn.disabled = false;
setTimeout(() => { container.classList.add('d-none'); }, 5000); setTimeout(() => container.classList.add('d-none'), 5000);
} }
batchBtn.addEventListener('click', async () => { batchBtn.addEventListener('click', async () => {

View File

@@ -162,7 +162,10 @@
statusText.textContent = `0 / ${jobs.length} done`; statusText.textContent = `0 / ${jobs.length} done`;
let completed = 0; let completed = 0;
let currentItem = '';
await Promise.all(jobs.map(async ({ item, jobId }) => { await Promise.all(jobs.map(async ({ item, jobId }) => {
currentItem = item.name;
detailerNameText.textContent = `Processing: ${currentItem}`;
try { try {
const jobResult = await waitForJob(jobId); const jobResult = await waitForJob(jobId);
if (jobResult.result && jobResult.result.image_url) { if (jobResult.result && jobResult.result.image_url) {

View File

@@ -161,7 +161,10 @@
statusText.textContent = `0 / ${jobs.length} done`; statusText.textContent = `0 / ${jobs.length} done`;
let completed = 0; let completed = 0;
let currentItem = '';
await Promise.all(jobs.map(async ({ item, jobId }) => { await Promise.all(jobs.map(async ({ item, jobId }) => {
currentItem = item.name;
charNameText.textContent = `Processing: ${currentItem}`;
try { try {
const jobResult = await waitForJob(jobId); const jobResult = await waitForJob(jobId);
if (jobResult.result && jobResult.result.image_url) { if (jobResult.result && jobResult.result.image_url) {

View File

@@ -45,6 +45,10 @@
<span class="status-dot status-checking"></span> <span class="status-dot status-checking"></span>
<span class="status-label d-none d-xl-inline">MCP</span> <span class="status-label d-none d-xl-inline">MCP</span>
</span> </span>
<span id="status-llm" class="service-status" title="LLM" data-bs-toggle="tooltip" data-bs-placement="bottom" data-bs-title="LLM: checking…">
<span class="status-dot status-checking"></span>
<span class="status-label d-none d-xl-inline">LLM</span>
</span>
</div> </div>
</div> </div>
</nav> </nav>
@@ -343,6 +347,7 @@
const services = [ const services = [
{ id: 'status-comfyui', url: '/api/status/comfyui', label: 'ComfyUI' }, { id: 'status-comfyui', url: '/api/status/comfyui', label: 'ComfyUI' },
{ id: 'status-mcp', url: '/api/status/mcp', label: 'Danbooru MCP' }, { id: 'status-mcp', url: '/api/status/mcp', label: 'Danbooru MCP' },
{ id: 'status-llm', url: '/api/status/llm', label: 'LLM' },
]; ];
function setStatus(id, label, ok) { function setStatus(id, label, ok) {

View File

@@ -59,6 +59,9 @@
<img id="img-{{ look.slug }}" src="" alt="{{ look.name }}" class="d-none"> <img id="img-{{ look.slug }}" src="" alt="{{ look.name }}" class="d-none">
<span id="no-img-{{ look.slug }}" class="text-muted">No Image</span> <span id="no-img-{{ look.slug }}" class="text-muted">No Image</span>
{% endif %} {% endif %}
{% if look_assignments.get(look.look_id, 0) > 0 %}
<span class="assignment-badge" title="Assigned to {{ look_assignments.get(look.look_id, 0) }} character(s)">{{ look_assignments.get(look.look_id, 0) }}</span>
{% endif %}
</div> </div>
<div class="card-body"> <div class="card-body">
<h5 class="card-title text-center">{{ look.name }}</h5> <h5 class="card-title text-center">{{ look.name }}</h5>
@@ -106,6 +109,22 @@
const stepProgressText = document.getElementById('current-step-progress'); const stepProgressText = document.getElementById('current-step-progress');
let currentJobId = null; let currentJobId = null;
let queuePollInterval = null;
async function updateCurrentJobLabel() {
try {
const resp = await fetch('/api/queue');
const data = await resp.json();
const processingJob = data.jobs.find(j => j.status === 'processing');
if (processingJob) {
itemNameText.textContent = `Processing: ${processingJob.label}`;
} else {
itemNameText.textContent = '';
}
} catch (err) {
console.error('Failed to fetch queue:', err);
}
}
async function waitForJob(jobId) { async function waitForJob(jobId) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
@@ -136,30 +155,42 @@
regenAllBtn.disabled = true; regenAllBtn.disabled = true;
container.classList.remove('d-none'); container.classList.remove('d-none');
let completed = 0; // Phase 1: Queue all jobs upfront
for (const item of missing) { progressBar.style.width = '100%';
const percent = Math.round((completed / missing.length) * 100); progressBar.textContent = '';
progressBar.style.width = `${percent}%`; progressBar.classList.add('progress-bar-striped', 'progress-bar-animated');
progressBar.textContent = `${percent}%`; nodeStatus.textContent = 'Queuing…';
statusText.textContent = `Batch Generating Looks: ${completed + 1} / ${missing.length}`;
itemNameText.textContent = `Current: ${item.name}`;
nodeStatus.textContent = "Queuing…";
taskProgressBar.style.width = '100%';
taskProgressBar.textContent = '';
taskProgressBar.classList.add('progress-bar-striped', 'progress-bar-animated');
const jobs = [];
for (const item of missing) {
statusText.textContent = `Queuing ${jobs.length + 1} / ${missing.length}`;
try { try {
const genResp = await fetch(`/look/${item.slug}/generate`, { const genResp = await fetch(`/look/${item.slug}/generate`, {
method: 'POST', method: 'POST',
body: new URLSearchParams({ 'action': 'replace' }), body: new URLSearchParams({ action: 'replace' }),
headers: { 'X-Requested-With': 'XMLHttpRequest' } headers: { 'X-Requested-With': 'XMLHttpRequest' }
}); });
const genData = await genResp.json(); const genData = await genResp.json();
currentJobId = genData.job_id; if (genData.job_id) jobs.push({ item, jobId: genData.job_id });
} catch (err) {
console.error(`Failed to queue ${item.name}:`, err);
}
}
const jobResult = await waitForJob(currentJobId); // Phase 2: Poll all concurrently
currentJobId = null; progressBar.classList.remove('progress-bar-striped', 'progress-bar-animated');
progressBar.style.width = '0%';
progressBar.textContent = '0%';
statusText.textContent = `0 / ${jobs.length} done`;
// Start polling queue for current job label
queuePollInterval = setInterval(updateCurrentJobLabel, 1000);
updateCurrentJobLabel(); // Initial update
let completed = 0;
await Promise.all(jobs.map(async ({ item, jobId }) => {
try {
const jobResult = await waitForJob(jobId);
if (jobResult.result && jobResult.result.image_url) { if (jobResult.result && jobResult.result.image_url) {
const img = document.getElementById(`img-${item.slug}`); const img = document.getElementById(`img-${item.slug}`);
const noImgSpan = document.getElementById(`no-img-${item.slug}`); const noImgSpan = document.getElementById(`no-img-${item.slug}`);
@@ -168,22 +199,31 @@
} }
} catch (err) { } catch (err) {
console.error(`Failed for ${item.name}:`, err); console.error(`Failed for ${item.name}:`, err);
currentJobId = null;
} }
completed++; completed++;
const pct = Math.round((completed / jobs.length) * 100);
progressBar.style.width = `${pct}%`;
progressBar.textContent = `${pct}%`;
statusText.textContent = `${completed} / ${jobs.length} done`;
}));
// Stop polling queue
if (queuePollInterval) {
clearInterval(queuePollInterval);
queuePollInterval = null;
} }
progressBar.style.width = '100%'; progressBar.style.width = '100%';
progressBar.textContent = '100%'; progressBar.textContent = '100%';
statusText.textContent = "Batch Look Generation Complete!"; statusText.textContent = 'Batch Look Generation Complete!';
itemNameText.textContent = ""; itemNameText.textContent = '';
nodeStatus.textContent = "Done"; nodeStatus.textContent = 'Done';
stepProgressText.textContent = ""; stepProgressText.textContent = '';
taskProgressBar.style.width = '0%'; taskProgressBar.style.width = '0%';
taskProgressBar.textContent = ''; taskProgressBar.textContent = '';
batchBtn.disabled = false; batchBtn.disabled = false;
regenAllBtn.disabled = false; regenAllBtn.disabled = false;
setTimeout(() => { container.classList.add('d-none'); }, 5000); setTimeout(() => container.classList.add('d-none'), 5000);
} }
batchBtn.addEventListener('click', async () => { batchBtn.addEventListener('click', async () => {

View File

@@ -59,6 +59,9 @@
<img id="img-{{ outfit.slug }}" src="" alt="{{ outfit.name }}" class="d-none"> <img id="img-{{ outfit.slug }}" src="" alt="{{ outfit.name }}" class="d-none">
<span id="no-img-{{ outfit.slug }}" class="text-muted">No Image</span> <span id="no-img-{{ outfit.slug }}" class="text-muted">No Image</span>
{% endif %} {% endif %}
{% if outfit.data.lora and outfit.data.lora.lora_name and lora_assignments.get(outfit.data.lora.lora_name, 0) > 0 %}
<span class="assignment-badge" title="Assigned to {{ lora_assignments.get(outfit.data.lora.lora_name, 0) }} character(s)">{{ lora_assignments.get(outfit.data.lora.lora_name, 0) }}</span>
{% endif %}
</div> </div>
<div class="card-body"> <div class="card-body">
<h5 class="card-title text-center">{{ outfit.name }}</h5> <h5 class="card-title text-center">{{ outfit.name }}</h5>
@@ -160,7 +163,10 @@
statusText.textContent = `0 / ${jobs.length} done`; statusText.textContent = `0 / ${jobs.length} done`;
let completed = 0; let completed = 0;
let currentItem = '';
await Promise.all(jobs.map(async ({ item, jobId }) => { await Promise.all(jobs.map(async ({ item, jobId }) => {
currentItem = item.name;
itemNameText.textContent = `Processing: ${currentItem}`;
try { try {
const jobResult = await waitForJob(jobId); const jobResult = await waitForJob(jobId);
if (jobResult.result && jobResult.result.image_url) { if (jobResult.result && jobResult.result.image_url) {

View File

@@ -160,7 +160,10 @@
statusText.textContent = `0 / ${jobs.length} done`; statusText.textContent = `0 / ${jobs.length} done`;
let completed = 0; let completed = 0;
let currentItem = '';
await Promise.all(jobs.map(async ({ item, jobId }) => { await Promise.all(jobs.map(async ({ item, jobId }) => {
currentItem = item.name;
itemNameText.textContent = `Processing: ${currentItem}`;
try { try {
const jobResult = await waitForJob(jobId); const jobResult = await waitForJob(jobId);
if (jobResult.result && jobResult.result.image_url) { if (jobResult.result && jobResult.result.image_url) {

View File

@@ -160,7 +160,10 @@
statusText.textContent = `0 / ${jobs.length} done`; statusText.textContent = `0 / ${jobs.length} done`;
let completed = 0; let completed = 0;
let currentItem = '';
await Promise.all(jobs.map(async ({ item, jobId }) => { await Promise.all(jobs.map(async ({ item, jobId }) => {
currentItem = item.name;
styleNameText.textContent = `Processing: ${currentItem}`;
try { try {
const jobResult = await waitForJob(jobId); const jobResult = await waitForJob(jobId);
if (jobResult.result && jobResult.result.image_url) { if (jobResult.result && jobResult.result.image_url) {