Add semantic tagging, search, favourite/NSFW filtering, and LLM job queue

Replaces old list-format tags (which duplicated prompt content) with structured
dict tags per category (origin_series, outfit_type, participants, style_type,
scene_type, etc.). Tags are now purely organizational metadata — removed from
the prompt pipeline entirely.

Adds is_favourite and is_nsfw columns to all 8 resource models. Favourite is
DB-only (a user preference); NSFW is also mirrored into the JSON tags so the flag
survives rescans.
All library pages get filter controls and favourites-first sorting.

Introduces a parallel LLM job queue (_enqueue_task + _llm_queue_worker) for
background tag regeneration, with the same status polling UI as ComfyUI jobs.
Fixes call_llm() to use a has_request_context() fallback so it also works when
invoked from background threads (outside a request context).

Adds global search (/search) across resources and gallery images, with navbar
search bar. Adds gallery image sidecar JSON for per-image favourite/NSFW metadata.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Aodhan Collins
2026-03-21 03:22:09 +00:00
parent 7d79e626a5
commit 32a73b02f5
72 changed files with 3163 additions and 2212 deletions

View File

@@ -149,6 +149,8 @@ def register_routes(app):
'use_lora': request.form.get('outfit_use_lora') == 'on'},
'action': {'action_id': _entity_id(request.form.get('action_id')),
'use_lora': request.form.get('action_use_lora') == 'on',
'suppress_wardrobe': {'true': True, 'false': False, 'random': 'random'}.get(
request.form.get('act_suppress_wardrobe')),
'fields': {k: _tog(request.form.get(f'act_{k}', 'true'))
for k in ['base', 'head', 'upper_body', 'lower_body', 'hands', 'feet', 'additional']}},
'style': {'style_id': _entity_id(request.form.get('style_id')),
@@ -247,11 +249,15 @@ def register_routes(app):
@app.route('/preset/create', methods=['GET', 'POST'])
def create_preset():
form_data = {}
if request.method == 'POST':
name = request.form.get('name', '').strip()
description = request.form.get('description', '').strip()
use_llm = request.form.get('use_llm') == 'on'
form_data = {'name': name, 'description': description, 'use_llm': use_llm}
safe_id = re.sub(r'[^a-zA-Z0-9]+', '_', name.lower()).strip('_') or 'preset'
safe_slug = re.sub(r'[^a-zA-Z0-9_]', '', safe_id)
base_id = safe_id
@@ -265,7 +271,7 @@ def register_routes(app):
system_prompt = load_prompt('preset_system.txt')
if not system_prompt:
flash('Preset system prompt file not found.', 'error')
return redirect(request.url)
return render_template('presets/create.html', form_data=form_data)
try:
llm_response = call_llm(
f"Create a preset profile named '{name}' based on this description: {description}",
@@ -276,7 +282,7 @@ def register_routes(app):
except Exception as e:
logger.exception("LLM error creating preset: %s", e)
flash(f"AI generation failed: {e}", 'error')
return redirect(request.url)
return render_template('presets/create.html', form_data=form_data)
else:
preset_data = {
'character': {'character_id': 'random', 'use_lora': True,
@@ -314,7 +320,7 @@ def register_routes(app):
flash(f"Preset '{name}' created!")
return redirect(url_for('edit_preset', slug=safe_slug))
return render_template('presets/create.html')
return render_template('presets/create.html', form_data=form_data)
@app.route('/get_missing_presets')
def get_missing_presets():