Files
character-browser/templates/settings.html
Aodhan Collins 0b8802deb5 Add Checkpoints Gallery with per-checkpoint generation settings
- New Checkpoint model (slug, name, checkpoint_path, data JSON, image_path)
- sync_checkpoints() loads metadata from data/checkpoints/*.json and falls
  back to template defaults for models without a JSON file
- _apply_checkpoint_settings() applies per-checkpoint steps, CFG, sampler,
  base positive/negative prompts, and VAE (with dynamic VAELoader node
  injection for non-integrated VAEs) to the ComfyUI workflow
- Bulk Create from Checkpoints: scans Illustrious/Noob model directories,
  reads matching HTML files, uses LLM to populate metadata, falls back to
  template defaults when no HTML is present
- Gallery index with batch cover generation and WebSocket progress bar
- Detail page showing Generation Settings and Base Prompts cards
- Checkpoints nav link added to layout
- New data/prompts/checkpoint_system.txt LLM system prompt
- Updated README with all current galleries and file structure
- Also includes accumulated action/scene JSON updates, new actions, and
  other template/generator improvements from prior sessions

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-02-26 21:25:23 +00:00

197 lines
10 KiB
HTML

{% extends "layout.html" %}
{% block content %}
<div class="container">
  <div class="row justify-content-center">
    <div class="col-md-8">
      <div class="card shadow">
        <div class="card-header bg-dark text-white d-flex justify-content-between align-items-center">
          <h5 class="mb-0">Application Settings</h5>
          <span class="badge bg-primary">LLM Configuration</span>
        </div>
        <div class="card-body">
          <!-- Posts back to the same settings route; all provider fields are
               submitted together and the server keeps whichever apply. -->
          <form method="post">
            <div class="mb-4">
              <label for="llm_provider" class="form-label fw-bold">LLM Provider</label>
              <select class="form-select form-select-lg" id="llm_provider" name="llm_provider">
                <option value="openrouter" {% if settings.llm_provider == 'openrouter' %}selected{% endif %}>OpenRouter (Cloud)</option>
                <option value="ollama" {% if settings.llm_provider == 'ollama' %}selected{% endif %}>Ollama (Local)</option>
                <option value="lmstudio" {% if settings.llm_provider == 'lmstudio' %}selected{% endif %}>LMStudio (Local)</option>
              </select>
              <div class="form-text">Choose where your AI text generation requests are processed.</div>
            </div>
            <hr>
            <!-- OpenRouter Settings: shown only when the cloud provider is active;
                 visibility is toggled client-side by the provider <select>. -->
            <div id="openrouter-settings" class="provider-settings" {% if settings.llm_provider != 'openrouter' %}style="display:none;"{% endif %}>
              <h5 class="mb-3 text-primary">OpenRouter Configuration</h5>
              <div class="mb-3">
                <label for="api_key" class="form-label">API Key</label>
                <div class="input-group">
                  <!-- autocomplete="off": this is an API key, not a login password;
                       discourage browsers from autofilling saved credentials here. -->
                  <input type="password" class="form-control" id="api_key" name="api_key" autocomplete="off" value="{{ settings.openrouter_api_key or '' }}">
                  <button class="btn btn-outline-primary" type="button" id="connect-openrouter-btn">Load Models</button>
                </div>
                <!-- rel="noopener noreferrer" prevents the external page from
                     accessing window.opener (reverse tabnabbing). -->
                <div class="form-text">Get your key at <a href="https://openrouter.ai/" target="_blank" rel="noopener noreferrer">openrouter.ai</a></div>
              </div>
              <div class="mb-3">
                <label for="model" class="form-label">Model Selection</label>
                <!-- Seeded with the saved model; repopulated via "Load Models". -->
                <select class="form-select" id="model" name="model">
                  <option value="{{ settings.openrouter_model }}" selected>{{ settings.openrouter_model }}</option>
                </select>
              </div>
            </div>
            <!-- Local LLM Settings (shared by Ollama and LMStudio) -->
            <div id="local-settings" class="provider-settings" {% if settings.llm_provider == 'openrouter' %}style="display:none;"{% endif %}>
              <h5 class="mb-3 text-primary">Local LLM Configuration</h5>
              <div class="mb-3">
                <label for="local_base_url" class="form-label">Base URL</label>
                <div class="input-group">
                  <input type="text" class="form-control" id="local_base_url" name="local_base_url"
                         placeholder="e.g. http://localhost:11434/v1"
                         value="{{ settings.local_base_url or '' }}">
                  <button class="btn btn-outline-primary" type="button" id="connect-local-btn">Load Models</button>
                </div>
                <div id="url-help" class="form-text">
                  Ollama default: <code>http://localhost:11434/v1</code><br>
                  LMStudio default: <code>http://localhost:1234/v1</code>
                </div>
              </div>
              <div class="mb-3">
                <label for="local_model" class="form-label">Model Selection</label>
                <select class="form-select" id="local_model" name="local_model">
                  {% if settings.local_model %}
                  <option value="{{ settings.local_model }}" selected>{{ settings.local_model }}</option>
                  {% else %}
                  <option value="" selected disabled>Select a model...</option>
                  {% endif %}
                </select>
                <div class="form-text">Ensure your local LLM server is running and API is enabled.</div>
              </div>
            </div>
            <div class="d-grid mt-4">
              <button type="submit" class="btn btn-primary btn-lg">Save All Settings</button>
            </div>
          </form>
        </div>
      </div>
    </div>
  </div>
</div>
{% endblock %}
{% block scripts %}
<script>
// Settings-page behavior: toggles provider panels and fetches model lists
// from the backend (/get_openrouter_models, /get_local_models).
document.addEventListener('DOMContentLoaded', () => {
  const providerSelect = document.getElementById('llm_provider');
  const openrouterSettings = document.getElementById('openrouter-settings');
  const localSettings = document.getElementById('local-settings');
  const localBaseUrlInput = document.getElementById('local_base_url');

  // Show the panel matching the chosen provider; hide the other.
  providerSelect.addEventListener('change', () => {
    if (providerSelect.value === 'openrouter') {
      openrouterSettings.style.display = 'block';
      localSettings.style.display = 'none';
    } else {
      openrouterSettings.style.display = 'none';
      localSettings.style.display = 'block';
      // Auto-fill the provider's conventional default URL, but never
      // overwrite a URL the user has already typed.
      if (!localBaseUrlInput.value) {
        if (providerSelect.value === 'ollama') {
          localBaseUrlInput.value = 'http://localhost:11434/v1';
        } else if (providerSelect.value === 'lmstudio') {
          localBaseUrlInput.value = 'http://localhost:1234/v1';
        }
      }
    }
  });

  // --- OpenRouter model loading ---
  const connectOpenRouterBtn = document.getElementById('connect-openrouter-btn');
  const apiKeyInput = document.getElementById('api_key');
  const modelSelect = document.getElementById('model');
  // |tojson emits a properly escaped JS string literal (quotes included).
  // Plain "{{ ... }}" inside <script> is HTML-escaped, so a model name
  // containing " or & would corrupt or break this script.
  const currentModel = {{ settings.openrouter_model|tojson }};

  connectOpenRouterBtn.addEventListener('click', async () => {
    const apiKey = apiKeyInput.value;
    if (!apiKey) { alert('Please enter an API Key first.'); return; }
    connectOpenRouterBtn.disabled = true;
    connectOpenRouterBtn.textContent = 'Loading...';
    try {
      const formData = new FormData();
      formData.append('api_key', apiKey);
      const response = await fetch('/get_openrouter_models', { method: 'POST', body: formData });
      const data = await response.json();
      if (data.error) {
        alert('Error: ' + data.error);
      } else {
        // Rebuild the <select>, keeping the previously saved model selected.
        modelSelect.innerHTML = '';
        data.models.sort((a, b) => a.name.localeCompare(b.name)).forEach(model => {
          const option = document.createElement('option');
          option.value = model.id;
          option.textContent = model.name;
          if (model.id === currentModel) option.selected = true;
          modelSelect.appendChild(option);
        });
        alert('OpenRouter models loaded successfully!');
      }
    } catch (err) {
      alert('Failed to connect to OpenRouter.');
    } finally {
      connectOpenRouterBtn.disabled = false;
      connectOpenRouterBtn.textContent = 'Load Models';
    }
  });

  // --- Local (Ollama/LMStudio) model loading ---
  const connectLocalBtn = document.getElementById('connect-local-btn');
  const localModelSelect = document.getElementById('local_model');
  // Same |tojson escaping rationale as currentModel above.
  const currentLocalModel = {{ settings.local_model|tojson }};

  connectLocalBtn.addEventListener('click', async () => {
    const baseUrl = localBaseUrlInput.value;
    if (!baseUrl) { alert('Please enter a Base URL first.'); return; }
    connectLocalBtn.disabled = true;
    connectLocalBtn.textContent = 'Loading...';
    try {
      const formData = new FormData();
      formData.append('base_url', baseUrl);
      const response = await fetch('/get_local_models', { method: 'POST', body: formData });
      const data = await response.json();
      if (data.error) {
        alert('Error: ' + data.error);
      } else {
        localModelSelect.innerHTML = '';
        // Sort alphabetically for consistency with the OpenRouter list.
        data.models.sort((a, b) => a.name.localeCompare(b.name)).forEach(model => {
          const option = document.createElement('option');
          option.value = model.id;
          option.textContent = model.name;
          if (model.id === currentLocalModel) option.selected = true;
          localModelSelect.appendChild(option);
        });
        if (data.models.length === 0) alert('No models found at this URL.');
        else alert('Local models loaded successfully!');
      }
    } catch (err) {
      alert('Failed to connect to local LLM server. Make sure it is running and CORS is enabled if needed.');
    } finally {
      connectLocalBtn.disabled = false;
      connectLocalBtn.textContent = 'Load Models';
    }
  });
});
</script>
{% endblock %}